From python-checkins at python.org Thu Aug 1 00:12:15 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 1 Aug 2013 00:12:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Initialize_uti?= =?utf-8?q?me_with_0=2E_It_fixes_a_couple_of_compiler_warnung=3A?= Message-ID: <3c584v6G06zRGl@mail.python.org> http://hg.python.org/cpython/rev/279c8c8e433d changeset: 84942:279c8c8e433d branch: 3.3 parent: 84940:e1816ec67143 user: Christian Heimes date: Thu Aug 01 00:08:16 2013 +0200 summary: Initialize utime with 0. It fixes a couple of compiler warnung: warning: 'utime.mtime_ns' may be used uninitialized in this function [-Wmaybe-uninitialized] files: Modules/posixmodule.c | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4751,6 +4751,7 @@ PyObject *return_value = NULL; memset(&path, 0, sizeof(path)); + memset(&utime, 0, sizeof(utime_t)); #if UTIME_HAVE_FD path.allow_fd = 1; #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 00:12:17 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 1 Aug 2013 00:12:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Initialize_utime_with_0=2E_It_fixes_a_couple_of_compiler?= =?utf-8?q?_warnung=3A?= Message-ID: <3c584x14PczRxV@mail.python.org> http://hg.python.org/cpython/rev/967e3cb22baf changeset: 84943:967e3cb22baf parent: 84941:d5cf4f973602 parent: 84942:279c8c8e433d user: Christian Heimes date: Thu Aug 01 00:12:06 2013 +0200 summary: Initialize utime with 0. It fixes a couple of compiler warnung: warning: 'utime.mtime_ns' may be used uninitialized in this function [-Wmaybe-uninitialized] files: Modules/posixmodule.c | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4494,6 +4494,7 @@ memset(&path, 0, sizeof(path)); path.function_name = "utime"; + memset(&utime, 0, sizeof(utime_t)); #if UTIME_HAVE_FD path.allow_fd = 1; #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 02:48:43 2013 From: python-checkins at python.org (r.david.murray) Date: Thu, 1 Aug 2013 02:48:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_=2317616=3A_Improve_contex?= =?utf-8?q?t_manager_tests=2C_fix_bugs_in_close_method_and_mode_docs=2E?= Message-ID: <3c5CYR1KQ4z7Ljx@mail.python.org> http://hg.python.org/cpython/rev/4f3b6eff2ede changeset: 84944:4f3b6eff2ede user: R David Murray date: Wed Jul 31 20:48:26 2013 -0400 summary: #17616: Improve context manager tests, fix bugs in close method and mode docs. 'mode' docs fix: the file must always be opened in binary in Python3. Bug in Wave_write.close: when the close method calls the check that the header exists and it raises an error, the _file attribute never gets set to None, so the next close tries to close the file again and we get an ignored traceback in the __del__ method. The fix is to set _file to None in a finally clause. This represents a behavior change...in theory a program could be checking for the error on close and then doing a recovery action on the still open file and closing it again. But this change will only go into 3.4, so I think that behavior change is acceptable given that it would be pretty weird and unlikely logic to begin with. 
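A minimal sketch of the close() pattern described above (toy class and names, not the committed code -- the real change is in the Lib/wave.py hunk below): clearing the file attribute in a finally clause means a later close(), such as the one triggered from __del__, finds nothing left to do even when the header check raised.

    class _CloseSketch:
        """Toy stand-in for Wave_write, illustrating finally-based cleanup."""
        def __init__(self, fileobj):
            self._file = fileobj

        def _ensure_header_written(self):
            # stand-in for wave's check; raises when no header was ever set
            raise ValueError("# of channels not specified")

        def close(self):
            if self._file:
                try:
                    self._ensure_header_written()
                    self._file.flush()
                finally:
                    # cleared even when the check above raises, so a second
                    # close() (e.g. from __del__) is a harmless no-op
                    self._file = None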
files: Doc/library/wave.rst | 15 ++++---- Lib/test/test_wave.py | 53 +++++++++++++++++++++++------- Lib/wave.py | 12 ++++-- 3 files changed, 54 insertions(+), 26 deletions(-) diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst --- a/Doc/library/wave.rst +++ b/Doc/library/wave.rst @@ -19,21 +19,20 @@ .. function:: open(file, mode=None) If *file* is a string, open the file by that name, otherwise treat it as a - seekable file-like object. *mode* can be any of + seekable file-like object. *mode* can be: - ``'r'``, ``'rb'`` + ``'rb'`` Read only mode. - ``'w'``, ``'wb'`` + ``'wb'`` Write only mode. Note that it does not allow read/write WAV files. - A *mode* of ``'r'`` or ``'rb'`` returns a :class:`Wave_read` object, while a - *mode* of ``'w'`` or ``'wb'`` returns a :class:`Wave_write` object. If - *mode* is omitted and a file-like object is passed as *file*, ``file.mode`` - is used as the default value for *mode* (the ``'b'`` flag is still added if - necessary). + A *mode* of ``'rb'`` returns a :class:`Wave_read` object, while a *mode* of + ``'wb'`` returns a :class:`Wave_write` object. If *mode* is omitted and a + file-like object is passed as *file*, ``file.mode`` is used as the default + value for *mode*. If you pass in a file-like object, the wave object will not close it when its :meth:`close` method is called; it is the caller's responsibility to close diff --git a/Lib/test/test_wave.py b/Lib/test/test_wave.py --- a/Lib/test/test_wave.py +++ b/Lib/test/test_wave.py @@ -69,22 +69,49 @@ self.assertEqual(params.comptype, self.f.getcomptype()) self.assertEqual(params.compname, self.f.getcompname()) - def test_context_manager(self): - self.f = wave.open(TESTFN, 'wb') - self.f.setnchannels(nchannels) - self.f.setsampwidth(sampwidth) - self.f.setframerate(framerate) - self.f.close() + def test_wave_write_context_manager_calls_close(self): + # Close checks for a minimum header and will raise an error + # if it is not set, so this proves that close is called. + with self.assertRaises(wave.Error): + with wave.open(TESTFN, 'wb') as f: + pass + print('in test:', f._file) + with self.assertRaises(wave.Error): + with open(TESTFN, 'wb') as testfile: + with wave.open(testfile): + pass + def test_context_manager_with_open_file(self): + with open(TESTFN, 'wb') as testfile: + with wave.open(testfile) as f: + f.setnchannels(nchannels) + f.setsampwidth(sampwidth) + f.setframerate(framerate) + self.assertFalse(testfile.closed) + with open(TESTFN, 'rb') as testfile: + with wave.open(testfile) as f: + self.assertFalse(f.getfp().closed) + params = f.getparams() + self.assertEqual(params.nchannels, nchannels) + self.assertEqual(params.sampwidth, sampwidth) + self.assertEqual(params.framerate, framerate) + self.assertIsNone(f.getfp()) + self.assertFalse(testfile.closed) + + def test_context_manager_with_filename(self): + # If the file doesn't get closed, this test won't fail, but it will + # produce a resource leak warning. 
+ with wave.open(TESTFN, 'wb') as f: + f.setnchannels(nchannels) + f.setsampwidth(sampwidth) + f.setframerate(framerate) with wave.open(TESTFN) as f: self.assertFalse(f.getfp().closed) - self.assertIs(f.getfp(), None) - - with open(TESTFN, 'wb') as testfile: - with self.assertRaises(wave.Error): - with wave.open(testfile, 'wb'): - pass - self.assertEqual(testfile.closed, False) + params = f.getparams() + self.assertEqual(params.nchannels, nchannels) + self.assertEqual(params.sampwidth, sampwidth) + self.assertEqual(params.framerate, framerate) + self.assertIsNone(f.getfp()) if __name__ == '__main__': diff --git a/Lib/wave.py b/Lib/wave.py --- a/Lib/wave.py +++ b/Lib/wave.py @@ -448,11 +448,13 @@ def close(self): if self._file: - self._ensure_header_written(0) - if self._datalength != self._datawritten: - self._patchheader() - self._file.flush() - self._file = None + try: + self._ensure_header_written(0) + if self._datalength != self._datawritten: + self._patchheader() + self._file.flush() + finally: + self._file = None if self._i_opened_the_file: self._i_opened_the_file.close() self._i_opened_the_file = None -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Thu Aug 1 05:49:34 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Thu, 01 Aug 2013 05:49:34 +0200 Subject: [Python-checkins] Daily reference leaks (4f3b6eff2ede): sum=12358 Message-ID: results for 4f3b6eff2ede on branch "default" -------------------------------------------- test_capi leaked [3464, 3464, 3464] references, sum=10392 test_capi leaked [654, 656, 656] memory blocks, sum=1966 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogavskOF', '-x'] From python-checkins at python.org Thu Aug 1 13:45:55 2013 From: python-checkins at python.org (tim.golden) Date: Thu, 1 Aug 2013 13:45:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=239035=3A_os=2Epath?= =?utf-8?q?=2Eismount_now_recognises_volumes_mounted_below?= Message-ID: <3c5V7l3c9kz7LjZ@mail.python.org> http://hg.python.org/cpython/rev/f283589cb71e changeset: 84945:f283589cb71e user: Tim Golden date: Thu Aug 01 12:44:00 2013 +0100 summary: Issue #9035: os.path.ismount now recognises volumes mounted below a drive root on Windows. Original patch by Atsuo Ishimoto. files: Lib/ntpath.py | 29 ++++++++++++++++--- Lib/test/test_ntpath.py | 34 +++++++++++++++++++++++ Misc/NEWS | 3 ++ Modules/posixmodule.c | 42 +++++++++++++++++++++++++++++ 4 files changed, 103 insertions(+), 5 deletions(-) diff --git a/Lib/ntpath.py b/Lib/ntpath.py --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -335,16 +335,35 @@ return False return True -# Is a path a mount point? Either a root (with or without drive letter) -# or an UNC path with at most a / or \ after the mount point. - +# Is a path a mount point? +# Any drive letter root (eg c:\) +# Any share UNC (eg \\server\share) +# Any volume mounted on a filesystem folder +# +# No one method detects all three situations. Historically we've lexically +# detected drive letter roots and share UNCs. The canonical approach to +# detecting mounted volumes (querying the reparse tag) fails for the most +# common case: drive letter roots. The alternative which uses GetVolumePathName +# fails if the drive letter is the result of a SUBST. 
+try: + from nt import _getvolumepathname +except ImportError: + _getvolumepathname = None def ismount(path): - """Test whether a path is a mount point (defined as root of drive)""" + """Test whether a path is a mount point (a drive root, the root of a + share, or a mounted volume)""" seps = _get_bothseps(path) + path = abspath(path) root, rest = splitdrive(path) if root and root[0] in seps: return (not rest) or (rest in seps) - return rest in seps + if rest in seps: + return True + + if _getvolumepathname: + return path.rstrip(seps) == _getvolumepathname(path).rstrip(seps) + else: + return False # Expand paths beginning with '~' or '~user'. diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -256,6 +256,40 @@ # dialogs (#4804) ntpath.sameopenfile(-1, -1) + def test_ismount(self): + self.assertTrue(ntpath.ismount("c:\\")) + self.assertTrue(ntpath.ismount("C:\\")) + self.assertTrue(ntpath.ismount("c:/")) + self.assertTrue(ntpath.ismount("C:/")) + self.assertTrue(ntpath.ismount("\\\\.\\c:\\")) + self.assertTrue(ntpath.ismount("\\\\.\\C:\\")) + + self.assertTrue(ntpath.ismount(b"c:\\")) + self.assertTrue(ntpath.ismount(b"C:\\")) + self.assertTrue(ntpath.ismount(b"c:/")) + self.assertTrue(ntpath.ismount(b"C:/")) + self.assertTrue(ntpath.ismount(b"\\\\.\\c:\\")) + self.assertTrue(ntpath.ismount(b"\\\\.\\C:\\")) + + with support.temp_dir() as d: + self.assertFalse(ntpath.ismount(d)) + + # + # Make sure the current folder isn't the root folder + # (or any other volume root). The drive-relative + # locations below cannot then refer to mount points + # + drive, path = ntpath.splitdrive(sys.executable) + with support.change_cwd(os.path.dirname(sys.executable)): + self.assertFalse(ntpath.ismount(drive.lower())) + self.assertFalse(ntpath.ismount(drive.upper())) + + self.assertTrue(ntpath.ismount("\\\\localhost\\c$")) + self.assertTrue(ntpath.ismount("\\\\localhost\\c$\\")) + + self.assertTrue(ntpath.ismount(b"\\\\localhost\\c$")) + self.assertTrue(ntpath.ismount(b"\\\\localhost\\c$\\")) + class NtCommonTest(test_genericpath.CommonTest, unittest.TestCase): pathmodule = ntpath diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #9035: ismount now recognises volumes mounted below a drive root + on Windows. Original patch by Atsuo Ishimoto. + - Issue #18214: Improve finalization of Python modules to avoid setting their globals to None, in most cases. 
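The C helper added to Modules/posixmodule.c below wraps the Win32 GetVolumePathNameW call. As a rough, Windows-only pure-Python illustration of the same idea (function names here are invented for the sketch and are not part of CPython), a path is treated as a mounted volume when Windows maps it back onto itself:

    import ctypes
    import ntpath

    def _volume_path_name(path, buflen=1024):
        # ask Windows for the volume mount point that contains *path*
        buf = ctypes.create_unicode_buffer(buflen)
        if not ctypes.windll.kernel32.GetVolumePathNameW(path, buf, buflen):
            raise ctypes.WinError()
        return buf.value

    def _looks_like_mounted_volume(path):
        # mirrors the new ntpath.ismount() fallback: compare the absolute
        # path with its own volume path, ignoring trailing separators
        path = ntpath.abspath(path)
        return path.rstrip("\\/") == _volume_path_name(path).rstrip("\\/")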
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -3711,6 +3711,47 @@ else Py_RETURN_FALSE; } + +PyDoc_STRVAR(posix__getvolumepathname__doc__, +"Return volume mount point of the specified path."); + +/* A helper function for ismount on windows */ +static PyObject * +posix__getvolumepathname(PyObject *self, PyObject *args) +{ + PyObject *po, *result; + wchar_t *path, *mountpath=NULL; + size_t bufsize; + BOOL ret; + + if (!PyArg_ParseTuple(args, "U|:_getvolumepathname", &po)) + return NULL; + path = PyUnicode_AsUnicode(po); + if (path == NULL) + return NULL; + + /* Volume path should be shorter than entire path */ + bufsize = max(MAX_PATH, wcslen(path) * 2 * sizeof(wchar_t)+1); + mountpath = (wchar_t *)PyMem_Malloc(bufsize); + if (mountpath == NULL) + return PyErr_NoMemory(); + + Py_BEGIN_ALLOW_THREADS + ret = GetVolumePathNameW(path, mountpath, bufsize); + Py_END_ALLOW_THREADS + + if (!ret) { + result = win32_error_object("_getvolumepathname", po); + goto exit; + } + result = PyUnicode_FromWideChar(mountpath, wcslen(mountpath)); + +exit: + PyMem_Free(mountpath); + return result; +} +/* end of posix__getvolumepathname */ + #endif /* MS_WINDOWS */ PyDoc_STRVAR(posix_mkdir__doc__, @@ -10885,6 +10926,7 @@ {"_getfinalpathname", posix__getfinalpathname, METH_VARARGS, NULL}, {"_isdir", posix__isdir, METH_VARARGS, posix__isdir__doc__}, {"_getdiskusage", win32__getdiskusage, METH_VARARGS, win32__getdiskusage__doc__}, + {"_getvolumepathname", posix__getvolumepathname, METH_VARARGS, posix__getvolumepathname__doc__}, #endif #ifdef HAVE_GETLOADAVG {"getloadavg", posix_getloadavg, METH_NOARGS, posix_getloadavg__doc__}, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 14:26:10 2013 From: python-checkins at python.org (nick.coghlan) Date: Thu, 1 Aug 2013 14:26:10 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_8_modernisation?= Message-ID: <3c5W2B3f71z7LjQ@mail.python.org> http://hg.python.org/peps/rev/fb24c80e9afb changeset: 5012:fb24c80e9afb user: Nick Coghlan date: Thu Aug 01 22:25:37 2013 +1000 summary: PEP 8 modernisation Thanks to Guido van Rossum, Barry Warsaw, Raymond Hettinger, Terry Reedy, Thomas Wouters and others for the constructive feedback :) files: pep-0008.txt | 257 ++++++++++++++++++++++++++++++-------- 1 files changed, 198 insertions(+), 59 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -3,12 +3,13 @@ Version: $Revision$ Last-Modified: $Date$ Author: Guido van Rossum , - Barry Warsaw + Barry Warsaw , + Nick Coghlan Status: Active Type: Process Content-Type: text/x-rst Created: 05-Jul-2001 -Post-History: 05-Jul-2001 +Post-History: 05-Jul-2001, 01-Aug-2013 Introduction @@ -23,6 +24,10 @@ Guido's original Python Style Guide essay, with some additions from Barry's style guide [2]_. +This style guide evolves over time as additional conventions are +identified and past conventions are rendered obsolete by changes in +the language itself. + A Foolish Consistency is the Hobgoblin of Little Minds ====================================================== @@ -41,15 +46,24 @@ judgment. Look at other examples and decide what looks best. And don't hesitate to ask! -Two good reasons to break a particular rule: +In particular: do not break backwards compatibility just to comply with +this PEP! -1. When applying the rule would make the code less readable, even for - someone who is used to reading code that follows the rules. 
+Some other good reasons to ignore a particular guideline: + +1. When applying the guideline would make the code less readable, even + for someone who is used to reading code that follows this PEP. 2. To be consistent with surrounding code that also breaks it (maybe for historic reasons) -- although this is also an opportunity to clean up someone else's mess (in true XP style). +3. Because the code in question predates the introduction of the + guideline and there is no other reason to be modifying that code. + +4. When the code needs to remain compatible with older versions of + Python that don't support the feature recommended by the style guide. + Code lay-out ============ @@ -59,9 +73,6 @@ Use 4 spaces per indentation level. -For really old code that you don't want to mess up, you can continue -to use 8-space tabs. - Continuation lines should align wrapped elements either vertically using Python's implicit line joining inside parentheses, brackets and braces, or using a hanging indent. When using a hanging indent the @@ -129,31 +140,41 @@ Tabs or Spaces? --------------- -Never mix tabs and spaces. +Spaces are the preferred indentation method. -The most popular way of indenting Python is with spaces only. The -second-most popular way is with tabs only. Code indented with a -mixture of tabs and spaces should be converted to using spaces -exclusively. When invoking the Python command line interpreter with +Tabs should be used solely to remain consistent with code that is +already indented with tabs. + +Python 3 disallows mixing the use of tabs and spaces for indentation. + +Python 2 code indented with a mixture of tabs and spaces should be +converted to using spaces exclusively. + +When invoking the Python 2 command line interpreter with the ``-t`` option, it issues warnings about code that illegally mixes tabs and spaces. When using ``-tt`` these warnings become errors. These options are highly recommended! -For new projects, spaces-only are strongly recommended over tabs. -Most editors have features that make this easy to do. Maximum Line Length ------------------- -Limit all lines to a maximum of 79 characters. +Aim to limit all lines to a maximum of 79 characters, but up to 99 +characters is acceptable when it improves readability. -There are still many devices around that are limited to 80 character -lines; plus, limiting windows to 80 characters makes it possible to -have several windows side-by-side. The default wrapping on such -devices disrupts the visual structure of the code, making it more -difficult to understand. Therefore, please limit all lines to a -maximum of 79 characters. For flowing long blocks of text (docstrings -or comments), limiting the length to 72 characters is recommended. +For flowing long blocks of text with fewer structural restrictions +(docstrings or comments), limiting the line length to 72 characters +is recommended. + +Limiting the required editor window width makes it possible to have +several files open side-by-side, and works well when using code +review tools that present the two versions in adjacent columns. + +The default wrapping in most tools disrupts the visual structure of the +code, making it more difficult to understand. The limits are chosen to +avoid wrapping in editors with the window width set to 80 (or 100), even +if the tool places a marker glyph in the final column when wrapping +lines. Some web based tools may not offer dynamic line wrapping at all. 
The preferred way of wrapping long lines is by using Python's implied line continuation inside parentheses, brackets and braces. Long lines @@ -214,15 +235,17 @@ Encodings (PEP 263) ------------------- -Code in the core Python distribution should always use the ASCII or -Latin-1 encoding (a.k.a. ISO-8859-1). For Python 3.0 and beyond, -UTF-8 is preferred over Latin-1, see PEP 3120. +Code in the core Python distribution should always use UTF-8 (or ASCII +in Python 2). -Files using ASCII should not have a coding cookie. Latin-1 (or UTF-8) -should only be used when a comment or docstring needs to mention an -author name that requires Latin-1; otherwise, using ``\x``, ``\u`` or -``\U`` escapes is the preferred way to include non-ASCII data in -string literals. +Files using ASCII (in Python 2) or UTF-8 (in Python 3) should not have a +coding cookie. + +In the standard library, non-default encodings should be used only for +test purposes or when a comment or docstring needs to mention an author +name that that contains non-ASCII characters; otherwise, using ``\x``, +``\u``, ``\U``, or ``\N`` escapes is the preferred way to include +non-ASCII data in string literals. For Python 3.0 and beyond, the following policy is prescribed for the standard library (see PEP 3131): All identifiers in the Python @@ -266,11 +289,27 @@ Put any relevant ``__all__`` specification after the imports. -- Relative imports for intra-package imports are highly discouraged. - Always use the absolute package path for all imports. Even now that - PEP 328 is fully implemented in Python 2.5, its style of explicit - relative imports is actively discouraged; absolute imports are more - portable and usually more readable. +- Absolute imports are recommended, as they are usually more readable + and tend to be better behaved (or at least give better error + messages) if the import system is incorrectly configured (such as + when a directory inside a package ends up on ``sys.path``):: + + import mypkg.sibling + from mypkg import sibling + from mypkg.sibling import example + + However, explicit relative imports are an acceptable alternative to + absolute imports, especially when dealing with complex package layouts + where using absolute imports would be unecessarily verbose:: + + from . import sibling + from .sibling import example + + Standard library code should avoid complex package layouts and always + use absolute imports. + + Implicit relative imports should *never* be used and have been removed + in Python 3. - When importing a class from a class-containing module, it's usually okay to spell this:: @@ -285,6 +324,18 @@ and use "myclass.MyClass" and "foo.bar.yourclass.YourClass". +- Wildcard imports (``from import *``) should be avoided, as + they make it unclear which names are present in the namespace, + confusing both readers and many automated tools. There is one + defensible use case for a wildcard import, which is to republish an + internal interface as part of a public API (for example, overwriting + a pure Python implementation of an interface with the definitions + from an optional accelerator module and exactly which definitions + will be overwritten isn't known in advance). + + When republishing names this way, the guidelines below regarding + public and internal interfaces still apply. + Whitespace in Expressions and Statements ======================================== @@ -760,6 +811,36 @@ advanced callers. 
+Public and internal interfaces +------------------------------ + +Any backwards compatibility guarantees apply only to public interfaces. +Accordingly, it is important that users be able to clearly distinguish +between public and internal interfaces. + +Documented interfaces are considered public, unless the documentation +explicitly declares them to be provisional or internal interfaces exempt +from the usual backwards compatibility guarantees. All undocumented +interfaces should be assumed to be internal. + +To better support introspection, modules should explicitly declare the +names in their public API using the ``__all__`` attribute. Setting +``__all__`` to an empty list indicates that the module has no public API. + +Even with ``__all__`` set appropriately, internal interfaces (packages, +modules, classes, functions, attributes or other names) should still be +prefixed with a single leading underscore. + +An interface is also considered internal if any containing namespace +(package, module or class) is considered internal. + +Imported names should always be considered an implementation detail. +Other modules must not rely on indirect access to such imported names +unless they are an explicitly documented part of the containing module's +API, such as ``os.path`` or a package's ``__init__`` module that exposes +functionality from submodules. + + Programming Recommendations =========================== @@ -769,10 +850,12 @@ For example, do not rely on CPython's efficient implementation of in-place string concatenation for statements in the form ``a += b`` - or ``a = a + b``. Those statements run more slowly in Jython. In - performance sensitive parts of the library, the ``''.join()`` form - should be used instead. This will ensure that concatenation occurs - in linear time across various implementations. + or ``a = a + b``. This optimization is fragile even in CPython (it + only works for some types) and isn't present at all in implementations + that don't use refcounting. In performance sensitive parts of the + library, the ``''.join()`` form should be used instead. This will + ensure that concatenation occurs in linear time across various + implementations. - Comparisons to singletons like None should always be done with ``is`` or ``is not``, never the equality operators. @@ -799,29 +882,59 @@ operator. However, it is best to implement all six operations so that confusion doesn't arise in other contexts. -- Use class-based exceptions. +- Always use a def statement instead of assigning a lambda expression + to a name. - String exceptions in new code are forbidden, and this language - feature has been removed in Python 2.6. + Yes:: - Modules or packages should define their own domain-specific base - exception class, which should be subclassed from the built-in - Exception class. Always include a class docstring. E.g.:: + def f(x): return 2*x - class MessageError(Exception): - """Base class for errors in the email package.""" + No:: + + f = lambda x: 2*x + + The first form means that the name of the resulting function object is + specifically 'f' instead of the generic ''. This is more + useful for tracebacks and string representations in general. The use + of the assignment statement eliminates the sole benefit a lambda + expression can offer over an explicit def statement (i.e. that it can + be embedded inside a larger expression) + +- Derive exceptions from ``Exception`` rather than ``BaseException``. 
+ Direct inheritance from ``BaseException`` is reserved for exceptions + where catching them is almost always the wrong thing to do. + + Design exception hierarchies based on the distinctions that code + *catching* the exceptions is likely to need, rather than the locations + where the exceptions are raised. Aim to answer the question + "What went wrong?" programmatically, rather than only stating that + "A problem occurred" (see PEP 3151 for an example of this lesson being + learned for the builtin exception hierarchy) Class naming conventions apply here, although you should add the - suffix "Error" to your exception classes, if the exception is an - error. Non-error exceptions need no special suffix. + suffix "Error" to your exception classes if the exception is an + error. Non-error exceptions that are used for non-local flow control + or other forms of signalling need no special suffix. -- When raising an exception, use ``raise ValueError('message')`` +- Use exception chaining appropriately. In Python 3, "raise X from Y" + should be used to indicate explicit replacement without losing the + original traceback. + + When deliberately replacing an inner exception (using "raise X" in + Python 2 or "raise X from None" in Python 3.3+), ensure that relevant + details are transferred to the new exception (such as preserving the + attribute name when converting KeyError to AttributeError, or + embedding the text of the original exception in the new exception + message). + +- When raising an exception in Python 2, use ``raise ValueError('message')`` instead of the older form ``raise ValueError, 'message'``. - The paren-using form is preferred because when the exception - arguments are long or include string formatting, you don't need to - use line continuation characters thanks to the containing - parentheses. The older form is not legal syntax in Python 3. + The latter form is not legal Python 3 syntax. + + The paren-using form also means that when the exception arguments are + long or include string formatting, you don't need to use line + continuation characters thanks to the containing parentheses. - When catching exceptions, mention specific exceptions whenever possible instead of using a bare ``except:`` clause. @@ -851,6 +964,21 @@ exception propagate upwards with ``raise``. ``try...finally`` can be a better way to handle this case. +- When binding caught exceptions to a name, prefer the explicit name + binding syntax added in Python 2.6:: + + try: + process_data() + except Exception as exc: + raise DataProcessingFailedError(str(exc)) + + This is the only syntax supported in Python 3, and avoids the + ambiguity problems associated with the older comma-based syntax. + +- When catching operating system errors, prefer the explicit exception + hierarchy introduced in Python 3.3 over introspection of ``errno`` + values. + - Additionally, for all try/except clauses, limit the ``try`` clause to the absolute minimum amount of code necessary. Again, this avoids masking bugs. @@ -873,6 +1001,10 @@ # Will also catch KeyError raised by handle_value() return key_not_found(key) +- When a resource is local to a particular section of code, use a + ``with`` statement to ensure it is cleaned up promptly and reliably + after use. A try/finally statement is also acceptable. + - Context managers should be invoked through separate functions or methods whenever they do something other than acquire and release resources. 
For example: @@ -907,9 +1039,6 @@ Yes: if foo.startswith('bar'): No: if foo[:3] == 'bar': - The exception is if your code must work with Python 1.5.2 (but let's - hope not!). - - Object type comparisons should always use isinstance() instead of comparing types directly. :: @@ -918,11 +1047,15 @@ No: if type(obj) is type(1): When checking if an object is a string, keep in mind that it might - be a unicode string too! In Python 2.3, str and unicode have a + be a unicode string too! In Python 2, str and unicode have a common base class, basestring, so you can do:: if isinstance(obj, basestring): + Note that in Python 3, ``unicode`` and ``basestring`` no longer exist + (there is only ``str``) and a bytes object is no longer a kind of + string (it is a sequence of integers instead) + - For sequences, (strings, lists, tuples), use the fact that empty sequences are false. :: @@ -947,6 +1080,10 @@ annotation style. Instead, the annotations are left for users to discover and experiment with useful annotation styles. + It is recommended that third party experimants with annotations use an + associated decorator to indicate how the annotation should be + interpreted. + Early core developer attempts to use function annotations revealed inconsistent, ad-hoc annotation styles. For example: @@ -1004,6 +1141,8 @@ .. [3] http://www.wikipedia.com/wiki/CamelCase +.. [4] PEP 8 modernisation, July 2013 + http://bugs.python.org/issue18472 Copyright ========= -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 1 14:53:45 2013 From: python-checkins at python.org (eli.bendersky) Date: Thu, 1 Aug 2013 14:53:45 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?devguide=3A_Take_a_bit_more_asterisk-?= =?utf-8?q?responsibility?= Message-ID: <3c5Wf14nFgzRvP@mail.python.org> http://hg.python.org/devguide/rev/2d93e4f13b92 changeset: 634:2d93e4f13b92 user: Eli Bendersky date: Thu Aug 01 05:53:35 2013 -0700 summary: Take a bit more asterisk-responsibility files: experts.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/experts.rst b/experts.rst --- a/experts.rst +++ b/experts.rst @@ -100,7 +100,7 @@ dummy_threading brett.cannon email barry, r.david.murray* encodings lemburg, loewis -enum eli.bendersky, barry, ethan.furman* +enum eli.bendersky*, barry, ethan.furman* errno exceptions fcntl @@ -249,7 +249,7 @@ xml.dom xml.dom.minidom xml.dom.pulldom -xml.etree effbot (inactive), eli.bendersky +xml.etree effbot (inactive), eli.bendersky* xml.parsers.expat christian.heimes xml.sax christian.heimes xml.sax.handler -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Thu Aug 1 15:03:36 2013 From: python-checkins at python.org (tim.golden) Date: Thu, 1 Aug 2013 15:03:36 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_issue9035=3A_Prevent_Windo?= =?utf-8?q?ws-specific_tests_from_running_on_non-Windows_platforms?= Message-ID: <3c5WsN0wDXz7Lkf@mail.python.org> http://hg.python.org/cpython/rev/5258c4399f2e changeset: 84946:5258c4399f2e user: Tim Golden date: Thu Aug 01 13:58:58 2013 +0100 summary: issue9035: Prevent Windows-specific tests from running on non-Windows platforms files: Lib/test/test_ntpath.py | 28 ++++++++++++++-------------- 1 files changed, 14 insertions(+), 14 deletions(-) diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -274,22 +274,22 @@ with support.temp_dir() as d: self.assertFalse(ntpath.ismount(d)) - # - # Make sure the current folder 
isn't the root folder - # (or any other volume root). The drive-relative - # locations below cannot then refer to mount points - # - drive, path = ntpath.splitdrive(sys.executable) - with support.change_cwd(os.path.dirname(sys.executable)): - self.assertFalse(ntpath.ismount(drive.lower())) - self.assertFalse(ntpath.ismount(drive.upper())) + if sys.platform == "win32": + # + # Make sure the current folder isn't the root folder + # (or any other volume root). The drive-relative + # locations below cannot then refer to mount points + # + drive, path = ntpath.splitdrive(sys.executable) + with support.change_cwd(os.path.dirname(sys.executable)): + self.assertFalse(ntpath.ismount(drive.lower())) + self.assertFalse(ntpath.ismount(drive.upper())) - self.assertTrue(ntpath.ismount("\\\\localhost\\c$")) - self.assertTrue(ntpath.ismount("\\\\localhost\\c$\\")) + self.assertTrue(ntpath.ismount("\\\\localhost\\c$")) + self.assertTrue(ntpath.ismount("\\\\localhost\\c$\\")) - self.assertTrue(ntpath.ismount(b"\\\\localhost\\c$")) - self.assertTrue(ntpath.ismount(b"\\\\localhost\\c$\\")) - + self.assertTrue(ntpath.ismount(b"\\\\localhost\\c$")) + self.assertTrue(ntpath.ismount(b"\\\\localhost\\c$\\")) class NtCommonTest(test_genericpath.CommonTest, unittest.TestCase): pathmodule = ntpath -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 15:33:00 2013 From: python-checkins at python.org (matthias.klose) Date: Thu, 1 Aug 2013 15:33:00 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_-_Issue_=2318257=3A_Fix_re?= =?utf-8?q?adlink_usage_in_python-config=2E__Install_the_python?= Message-ID: <3c5XWJ3FXzzPqh@mail.python.org> http://hg.python.org/cpython/rev/7b165c7ab7ef changeset: 84947:7b165c7ab7ef user: doko at ubuntu.com date: Thu Aug 01 15:32:49 2013 +0200 summary: - Issue #18257: Fix readlink usage in python-config. Install the python version again on Darwin. files: Makefile.pre.in | 7 +++++++ Misc/NEWS | 3 +++ Misc/python-config.in | 2 ++ Misc/python-config.sh.in | 9 +++++++-- 4 files changed, 19 insertions(+), 2 deletions(-) diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1201,6 +1201,13 @@ sed -e "s, at EXENAME@,$(BINDIR)/python$(LDVERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config.py # Replace makefile compat. variable references with shell script compat. ones; $(VAR) -> ${VAR} sed -e 's,\$$(\([A-Za-z0-9_]*\)),\$$\{\1\},g' < Misc/python-config.sh >python-config + # On Darwin, always use the python version of the script, the shell + # version doesn't use the compiler customizations that are provided + # in python (_osx_support.py). + if test `uname -s` = Darwin; then \ + cp python-config.py python-config; \ + fi + # Install the include files INCLDIRSTOMAKE=$(INCLUDEDIR) $(CONFINCLUDEDIR) $(INCLUDEPY) $(CONFINCLUDEPY) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -737,6 +737,9 @@ Build ----- +- Issue #18257: Fix readlink usage in python-config. Install the python + version again on Darwin. + - Issue #18481: Add C coverage reporting with gcov and lcov. A new make target "coverage-report" creates an instrumented Python build, runs unit tests and creates a HTML. The report can be updated with "make coverage-lcov". 
diff --git a/Misc/python-config.in b/Misc/python-config.in --- a/Misc/python-config.in +++ b/Misc/python-config.in @@ -1,6 +1,8 @@ #!@EXENAME@ # -*- python -*- +# Keep this script in sync with python-config.sh.in + import getopt import os import sys diff --git a/Misc/python-config.sh.in b/Misc/python-config.sh.in --- a/Misc/python-config.sh.in +++ b/Misc/python-config.sh.in @@ -1,5 +1,7 @@ #!/bin/sh +# Keep this script in sync with python-config.in + exit_with_usage () { echo "Usage: $0 --prefix|--exec-prefix|--includes|--libs|--cflags|--ldflags|--extension-suffix|--help|--abiflags|--configdir" @@ -15,7 +17,9 @@ { RESULT=$(dirname $(cd $(dirname "$1") && pwd -P)) if which readlink >/dev/null 2>&1 ; then - RESULT=$(readlink -f "$RESULT") + if readlink -f "$RESULT" >/dev/null 2>&1; then + RESULT=$(readlink -f "$RESULT") + fi fi echo $RESULT } @@ -23,7 +27,8 @@ prefix_build="@prefix@" prefix_real=$(installed_prefix "$0") -# Use sed to fix paths from their built to locations to their installed to locations. +# Use sed to fix paths from their built-to locations to their installed-to +# locations. prefix=$(echo "$prefix_build" | sed "s#$prefix_build#$prefix_real#") exec_prefix_build="@exec_prefix@" exec_prefix=$(echo "$exec_prefix_build" | sed "s#$exec_prefix_build#$prefix_real#") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 15:34:45 2013 From: python-checkins at python.org (nick.coghlan) Date: Thu, 1 Aug 2013 15:34:45 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Fix_a_typo?= Message-ID: <3c5XYK0s1Wz7LjS@mail.python.org> http://hg.python.org/peps/rev/ae0c89014b19 changeset: 5013:ae0c89014b19 user: Nick Coghlan date: Thu Aug 01 23:34:34 2013 +1000 summary: Fix a typo files: pep-0008.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -1080,7 +1080,7 @@ annotation style. Instead, the annotations are left for users to discover and experiment with useful annotation styles. - It is recommended that third party experimants with annotations use an + It is recommended that third party experiments with annotations use an associated decorator to indicate how the annotation should be interpreted. -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 1 17:04:18 2013 From: python-checkins at python.org (guido.van.rossum) Date: Thu, 1 Aug 2013 17:04:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Correctly_spell_hierarchy=2E?= Message-ID: <3c5ZXf1Y4cz7LlS@mail.python.org> http://hg.python.org/peps/rev/185a6cf22a00 changeset: 5014:185a6cf22a00 user: Guido van Rossum date: Thu Aug 01 08:04:13 2013 -0700 summary: Correctly spell hierarchy. files: pep-0413.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0413.txt b/pep-0413.txt --- a/pep-0413.txt +++ b/pep-0413.txt @@ -627,7 +627,7 @@ # Add maint.1, compat.1 etc as releases are made -Putting the version information in the directory heirarchy isn't strictly +Putting the version information in the directory hierarchy isn't strictly necessary (since the NEWS file generator could figure out from the version history), but does make it easier for *humans* to keep the different versions in order. 
-- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 1 17:35:25 2013 From: python-checkins at python.org (martin.v.loewis) Date: Thu, 1 Aug 2013 17:35:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Add_UUID_for_3=2E4=2E?= Message-ID: <3c5bDY1ysbz7LjT@mail.python.org> http://hg.python.org/cpython/rev/2c875178a8a2 changeset: 84948:2c875178a8a2 user: Martin v. L?wis date: Thu Aug 01 17:34:42 2013 +0200 summary: Add UUID for 3.4. files: Tools/msi/msi.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -121,6 +121,7 @@ "31":"{4afcba0b-13e4-47c3-bebe-477428b46913}", "32":"{3ff95315-1096-4d31-bd86-601d5438ad5e}", "33":"{f7581ca4-d368-4eea-8f82-d48c64c4f047}", + "34":"{7A0C5812-2583-40D9-BCBB-CD7485F11377}", } [major+minor] # Compute the name that Sphinx gives to the docfile -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 18:31:30 2013 From: python-checkins at python.org (martin.v.loewis) Date: Thu, 1 Aug 2013 18:31:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318569=3A_The_inst?= =?utf-8?q?aller_now_adds_=2Epy_to_the_PATHEXT_variable?= Message-ID: <3c5cTG5CRmz7Lkn@mail.python.org> http://hg.python.org/cpython/rev/2a38e4da6ce8 changeset: 84949:2a38e4da6ce8 user: Martin v. L?wis date: Thu Aug 01 18:31:06 2013 +0200 summary: Issue #18569: The installer now adds .py to the PATHEXT variable when extensions are registered. Patch by Paul Moore. files: Misc/NEWS | 6 ++++++ Tools/msi/msi.py | 4 ++++ 2 files changed, 10 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -734,6 +734,12 @@ - Issue #17532: Always include Options menu for IDLE on OS X. Patch by Guilherme Sim?es. +Windows +------- + +- Issue #18569: The installer now adds .py to the PATHEXT variable when extensions + are registered. Patch by Paul Moore. 
+ Build ----- diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -1270,6 +1270,10 @@ "{60254CA5-953B-11CF-8C96-00AA00B8708C}", "REGISTRY.def"), ]) + # PATHEXT + add_data(db, "Environment", + [("PathExtAddition", "=-*PathExt", "[~];.PY", "REGISTRY.def")]) + # Registry keys prefix = r"Software\%sPython\PythonCore\%s" % (testprefix, short_version) add_data(db, "Registry", -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 19:20:39 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 19:20:39 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Add_a_test_for_module_weak?= =?utf-8?q?refability?= Message-ID: <3c5dYz0bxLzNNc@mail.python.org> http://hg.python.org/cpython/rev/cdddd3ed2ae1 changeset: 84950:cdddd3ed2ae1 user: Antoine Pitrou date: Thu Aug 01 19:20:31 2013 +0200 summary: Add a test for module weakrefability files: Lib/test/test_module.py | 9 +++++++++ 1 files changed, 9 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_module.py b/Lib/test/test_module.py --- a/Lib/test/test_module.py +++ b/Lib/test/test_module.py @@ -1,5 +1,6 @@ # Test the module type import unittest +import weakref from test.support import run_unittest, gc_collect from test.script_helper import assert_python_ok @@ -95,6 +96,14 @@ gc_collect() self.assertEqual(destroyed, [1]) + def test_weakref(self): + m = ModuleType("foo") + wr = weakref.ref(m) + self.assertIs(wr(), m) + del m + gc_collect() + self.assertIs(wr(), None) + def test_module_repr_minimal(self): # reprs when modules have no __file__, __name__, or __loader__ m = ModuleType('foo') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 19:46:39 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 19:46:39 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Remove_Lib/site=2Epy_hack_?= =?utf-8?q?to_unregister_patched_builtins=2E?= Message-ID: <3c5f7z071wz7Lk5@mail.python.org> http://hg.python.org/cpython/rev/4ffa5f9e4ac2 changeset: 84951:4ffa5f9e4ac2 user: Antoine Pitrou date: Thu Aug 01 19:46:04 2013 +0200 summary: Remove Lib/site.py hack to unregister patched builtins. It creates a refleak in subinterpreters, as atexit callbacks aren't triggered at their end. files: Lib/site.py | 39 ++++++++++----------------------------- 1 files changed, 10 insertions(+), 29 deletions(-) diff --git a/Lib/site.py b/Lib/site.py --- a/Lib/site.py +++ b/Lib/site.py @@ -68,7 +68,6 @@ ImportError exception, it is silently ignored. """ -import atexit import sys import os import re @@ -87,25 +86,6 @@ USER_BASE = None -_no_builtin = object() - -def _patch_builtins(**items): - # When patching builtins, we make some objects almost immortal - # (builtins are only reclaimed at the very end of the interpreter - # shutdown sequence). To avoid keeping to many references alive, - # we register callbacks to undo our builtins additions. 
- old_items = {k: getattr(builtins, k, _no_builtin) for k in items} - def unpatch(old_items=old_items): - for k, v in old_items.items(): - if v is _no_builtin: - delattr(builtins, k) - else: - setattr(builtins, k, v) - for k, v in items.items(): - setattr(builtins, k, v) - atexit.register(unpatch) - - def makepath(*paths): dir = os.path.join(*paths) try: @@ -377,7 +357,8 @@ except: pass raise SystemExit(code) - _patch_builtins(quit=Quitter('quit'), exit=Quitter('exit')) + builtins.quit = Quitter('quit') + builtins.exit = Quitter('exit') class _Printer(object): @@ -442,20 +423,20 @@ def setcopyright(): """Set 'copyright' and 'credits' in builtins""" - _patch_builtins(copyright=_Printer("copyright", sys.copyright)) + builtins.copyright = _Printer("copyright", sys.copyright) if sys.platform[:4] == 'java': - _patch_builtins(credits=_Printer( + builtins.credits = _Printer( "credits", - "Jython is maintained by the Jython developers (www.jython.org).")) + "Jython is maintained by the Jython developers (www.jython.org).") else: - _patch_builtins(credits=_Printer("credits", """\ + builtins.credits = _Printer("credits", """\ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands - for supporting Python development. See www.python.org for more information.""")) + for supporting Python development. See www.python.org for more information.""") here = os.path.dirname(os.__file__) - _patch_builtins(license=_Printer( + builtins.license = _Printer( "license", "See http://www.python.org/%.3s/license.html" % sys.version, ["LICENSE.txt", "LICENSE"], - [os.path.join(here, os.pardir), here, os.curdir])) + [os.path.join(here, os.pardir), here, os.curdir]) class _Helper(object): @@ -472,7 +453,7 @@ return pydoc.help(*args, **kwds) def sethelper(): - _patch_builtins(help=_Helper()) + builtins.help = _Helper() def enablerlcompleter(): """Enable default readline configuration on interactive prompts, by -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 20:43:36 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 20:43:36 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_test=5Fcapi=3A_make_a_spec?= =?utf-8?q?ific_test_case_for_the_subinterpreter_test?= Message-ID: <3c5gPh4X5CzNn1@mail.python.org> http://hg.python.org/cpython/rev/3e0275ec4373 changeset: 84952:3e0275ec4373 user: Antoine Pitrou date: Thu Aug 01 20:43:26 2013 +0200 summary: test_capi: make a specific test case for the subinterpreter test (it was wrongly classified in the pending calls test case) files: Lib/test/test_capi.py | 7 ++++++- 1 files changed, 6 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -193,6 +193,9 @@ self.pendingcalls_submit(l, n) self.pendingcalls_wait(l, n) + +class SubinterpreterTest(unittest.TestCase): + def test_subinterps(self): import builtins r, w = os.pipe() @@ -208,6 +211,7 @@ self.assertNotEqual(pickle.load(f), id(sys.modules)) self.assertNotEqual(pickle.load(f), id(builtins)) + # Bug #6012 class Test6012(unittest.TestCase): def test(self): @@ -354,7 +358,8 @@ def test_main(): support.run_unittest(CAPITest, TestPendingCalls, Test6012, - EmbeddingTest, SkipitemTest, TestThreadState) + EmbeddingTest, SkipitemTest, TestThreadState, + SubinterpreterTest) for name in dir(_testcapi): if name.startswith('test_'): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 20:56:21 2013 From: 
python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 20:56:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318619=3A_Fix_atex?= =?utf-8?q?it_leaking_callbacks_registered_from_sub-interpreters=2C?= Message-ID: <3c5ghP5H3kz7LjP@mail.python.org> http://hg.python.org/cpython/rev/71b63a32b1e3 changeset: 84953:71b63a32b1e3 user: Antoine Pitrou date: Thu Aug 01 20:56:12 2013 +0200 summary: Issue #18619: Fix atexit leaking callbacks registered from sub-interpreters, and make it GC-aware. files: Lib/test/test_atexit.py | 42 +++++++++- Misc/NEWS | 3 + Modules/atexitmodule.c | 121 ++++++++++++++++++--------- 3 files changed, 122 insertions(+), 44 deletions(-) diff --git a/Lib/test/test_atexit.py b/Lib/test/test_atexit.py --- a/Lib/test/test_atexit.py +++ b/Lib/test/test_atexit.py @@ -2,6 +2,7 @@ import unittest import io import atexit +import _testcapi from test import support ### helpers @@ -23,7 +24,9 @@ def raise2(): raise SystemError -class TestCase(unittest.TestCase): + +class GeneralTest(unittest.TestCase): + def setUp(self): self.save_stdout = sys.stdout self.save_stderr = sys.stderr @@ -122,8 +125,43 @@ self.assertEqual(l, [5]) +class SubinterpreterTest(unittest.TestCase): + + def test_callbacks_leak(self): + # This test shows a leak in refleak mode if atexit doesn't + # take care to free callbacks in its per-subinterpreter module + # state. + n = atexit._ncallbacks() + code = r"""if 1: + import atexit + def f(): + pass + atexit.register(f) + del atexit + """ + ret = _testcapi.run_in_subinterp(code) + self.assertEqual(ret, 0) + self.assertEqual(atexit._ncallbacks(), n) + + def test_callbacks_leak_refcycle(self): + # Similar to the above, but with a refcycle through the atexit + # module. + n = atexit._ncallbacks() + code = r"""if 1: + import atexit + def f(): + pass + atexit.register(f) + atexit.__atexit = atexit + """ + ret = _testcapi.run_in_subinterp(code) + self.assertEqual(ret, 0) + self.assertEqual(atexit._ncallbacks(), n) + + def test_main(): - support.run_unittest(TestCase) + support.run_unittest(__name__) + if __name__ == "__main__": test_main() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -179,6 +179,9 @@ Library ------- +- Issue #18619: Fix atexit leaking callbacks registered from sub-interpreters, + and make it GC-aware. + - Issue #15699: The readline module now uses PEP 3121-style module initialization, so as to reclaim allocated resources (Python callbacks) at shutdown. Original patch by Robin Schreiber. 
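The tests above lean on the new private _ncallbacks() helper; a quick sketch of how it behaves from Python (private API, intended only for tests such as these):

    import atexit

    before = atexit._ncallbacks()       # number of currently registered callbacks

    def goodbye():
        print("bye")

    atexit.register(goodbye)
    assert atexit._ncallbacks() == before + 1

    atexit.unregister(goodbye)          # callbacks can be removed again
    assert atexit._ncallbacks() == before

The second test's "atexit.__atexit = atexit" line builds a reference cycle through the module object itself; without the m_traverse/m_clear slots added in the C changes below, the callbacks held in the module's per-interpreter state would stay invisible to the cycle collector and leak when the subinterpreter is destroyed.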
diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c --- a/Modules/atexitmodule.c +++ b/Modules/atexitmodule.c @@ -10,8 +10,6 @@ /* Forward declaration (for atexit_cleanup) */ static PyObject *atexit_clear(PyObject*, PyObject*); -/* Forward declaration (for atexit_callfuncs) */ -static void atexit_cleanup(PyObject*); /* Forward declaration of module object */ static struct PyModuleDef atexitmodule; @@ -33,6 +31,35 @@ #define GET_ATEXIT_STATE(mod) ((atexitmodule_state*)PyModule_GetState(mod)) +static void +atexit_delete_cb(atexitmodule_state *modstate, int i) +{ + atexit_callback *cb; + + cb = modstate->atexit_callbacks[i]; + modstate->atexit_callbacks[i] = NULL; + Py_DECREF(cb->func); + Py_DECREF(cb->args); + Py_XDECREF(cb->kwargs); + PyMem_Free(cb); +} + +/* Clear all callbacks without calling them */ +static void +atexit_cleanup(atexitmodule_state *modstate) +{ + atexit_callback *cb; + int i; + for (i = 0; i < modstate->ncallbacks; i++) { + cb = modstate->atexit_callbacks[i]; + if (cb == NULL) + continue; + + atexit_delete_cb(modstate, i); + } + modstate->ncallbacks = 0; +} + /* Installed into pythonrun.c's atexit mechanism */ static void @@ -78,34 +105,12 @@ } } - atexit_cleanup(module); + atexit_cleanup(modstate); if (exc_type) PyErr_Restore(exc_type, exc_value, exc_tb); } -static void -atexit_delete_cb(PyObject *self, int i) -{ - atexitmodule_state *modstate; - atexit_callback *cb; - - modstate = GET_ATEXIT_STATE(self); - cb = modstate->atexit_callbacks[i]; - modstate->atexit_callbacks[i] = NULL; - Py_DECREF(cb->func); - Py_DECREF(cb->args); - Py_XDECREF(cb->kwargs); - PyMem_Free(cb); -} - -static void -atexit_cleanup(PyObject *self) -{ - PyObject *r = atexit_clear(self, NULL); - Py_DECREF(r); -} - /* ===================================================================== */ /* Module methods. 
*/ @@ -194,21 +199,50 @@ static PyObject * atexit_clear(PyObject *self, PyObject *unused) { + atexit_cleanup(GET_ATEXIT_STATE(self)); + Py_RETURN_NONE; +} + +PyDoc_STRVAR(atexit_ncallbacks__doc__, +"_ncallbacks() -> int\n\ +\n\ +Return the number of registered exit functions."); + +static PyObject * +atexit_ncallbacks(PyObject *self, PyObject *unused) +{ atexitmodule_state *modstate; - atexit_callback *cb; - int i; modstate = GET_ATEXIT_STATE(self); + return PyLong_FromSsize_t(modstate->ncallbacks); +} + +static int +atexit_m_traverse(PyObject *self, visitproc visit, void *arg) +{ + int i; + atexitmodule_state *modstate; + + modstate = GET_ATEXIT_STATE(self); for (i = 0; i < modstate->ncallbacks; i++) { - cb = modstate->atexit_callbacks[i]; + atexit_callback *cb = modstate->atexit_callbacks[i]; if (cb == NULL) continue; + Py_VISIT(cb->func); + Py_VISIT(cb->args); + Py_VISIT(cb->kwargs); + } + return 0; +} - atexit_delete_cb(self, i); - } - modstate->ncallbacks = 0; - Py_RETURN_NONE; +static int +atexit_m_clear(PyObject *self) +{ + atexitmodule_state *modstate; + modstate = GET_ATEXIT_STATE(self); + atexit_cleanup(modstate); + return 0; } static void @@ -216,6 +250,7 @@ { atexitmodule_state *modstate; modstate = GET_ATEXIT_STATE(m); + atexit_cleanup(modstate); PyMem_Free(modstate->atexit_callbacks); } @@ -246,7 +281,7 @@ if (eq < 0) return NULL; if (eq) - atexit_delete_cb(self, i); + atexit_delete_cb(modstate, i); } Py_RETURN_NONE; } @@ -260,6 +295,8 @@ atexit_unregister__doc__}, {"_run_exitfuncs", (PyCFunction) atexit_run_exitfuncs, METH_NOARGS, atexit_run_exitfuncs__doc__}, + {"_ncallbacks", (PyCFunction) atexit_ncallbacks, METH_NOARGS, + atexit_ncallbacks__doc__}, {NULL, NULL} /* sentinel */ }; @@ -275,15 +312,15 @@ static struct PyModuleDef atexitmodule = { - PyModuleDef_HEAD_INIT, - "atexit", - atexit__doc__, - sizeof(atexitmodule_state), - atexit_methods, - NULL, - NULL, - NULL, - (freefunc)atexit_free + PyModuleDef_HEAD_INIT, + "atexit", + atexit__doc__, + sizeof(atexitmodule_state), + atexit_methods, + NULL, + atexit_m_traverse, + atexit_m_clear, + (freefunc)atexit_free }; PyMODINIT_FUNC -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 21:05:18 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 21:05:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318608=3A_Avoid_ke?= =?utf-8?q?eping_a_strong_reference_to_the_locale_module_inside_the?= Message-ID: <3c5gtk3nqnz7Lkb@mail.python.org> http://hg.python.org/cpython/rev/1c9aa4f68f2b changeset: 84954:1c9aa4f68f2b user: Antoine Pitrou date: Thu Aug 01 21:04:50 2013 +0200 summary: Issue #18608: Avoid keeping a strong reference to the locale module inside the _io module. files: Misc/NEWS | 3 + Modules/_io/_iomodule.c | 25 +++++++++++++++ Modules/_io/_iomodule.h | 2 + Modules/_io/textio.c | 48 ++++++++++++---------------- 4 files changed, 51 insertions(+), 27 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -179,6 +179,9 @@ Library ------- +- Issue #18608: Avoid keeping a strong reference to the locale module + inside the _io module. + - Issue #18619: Fix atexit leaking callbacks registered from sub-interpreters, and make it GC-aware. 
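The new _PyIO_get_locale_module() helper below, restated as a rough pure-Python sketch (names invented for the illustration): cache only a weak reference to the locale module and re-import it on demand, so _io never keeps locale alive on its own -- module objects are weakly referenceable, as the test added in changeset 84950 confirms.

    import importlib
    import weakref

    _locale_ref = None          # cache slot holding a weakref, never the module

    def get_locale_module():
        global _locale_ref
        if _locale_ref is not None:
            mod = _locale_ref()         # None once the module has been torn down
            if mod is not None:
                return mod
        mod = importlib.import_module("locale")
        _locale_ref = weakref.ref(mod)
        return mod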
diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -533,6 +533,31 @@ } +PyObject * +_PyIO_get_locale_module(_PyIO_State *state) +{ + PyObject *mod; + if (state->locale_module != NULL) { + assert(PyWeakref_CheckRef(state->locale_module)); + mod = PyWeakref_GET_OBJECT(state->locale_module); + if (mod != Py_None) { + Py_INCREF(mod); + return mod; + } + Py_CLEAR(state->locale_module); + } + mod = PyImport_ImportModule("locale"); + if (mod == NULL) + return NULL; + state->locale_module = PyWeakref_NewRef(mod, NULL); + if (state->locale_module == NULL) { + Py_DECREF(mod); + return NULL; + } + return mod; +} + + static int iomodule_traverse(PyObject *mod, visitproc visit, void *arg) { _PyIO_State *state = IO_MOD_STATE(mod); diff --git a/Modules/_io/_iomodule.h b/Modules/_io/_iomodule.h --- a/Modules/_io/_iomodule.h +++ b/Modules/_io/_iomodule.h @@ -137,6 +137,8 @@ #define IO_MOD_STATE(mod) ((_PyIO_State *)PyModule_GetState(mod)) #define IO_STATE IO_MOD_STATE(PyState_FindModule(&_PyIO_Module)) +extern PyObject *_PyIO_get_locale_module(_PyIO_State *); + extern PyObject *_PyIO_str_close; extern PyObject *_PyIO_str_closed; extern PyObject *_PyIO_str_decode; diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -917,35 +917,29 @@ } } if (encoding == NULL && self->encoding == NULL) { - if (state->locale_module == NULL) { - state->locale_module = PyImport_ImportModule("locale"); - if (state->locale_module == NULL) - goto catch_ImportError; + PyObject *locale_module = _PyIO_get_locale_module(state); + if (locale_module == NULL) + goto catch_ImportError; + self->encoding = _PyObject_CallMethodId( + locale_module, &PyId_getpreferredencoding, "O", Py_False); + Py_DECREF(locale_module); + if (self->encoding == NULL) { + catch_ImportError: + /* + Importing locale can raise a ImportError because of + _functools, and locale.getpreferredencoding can raise a + ImportError if _locale is not available. These will happen + during module building. + */ + if (PyErr_ExceptionMatches(PyExc_ImportError)) { + PyErr_Clear(); + self->encoding = PyUnicode_FromString("ascii"); + } else - goto use_locale; + goto error; } - else { - use_locale: - self->encoding = _PyObject_CallMethodId( - state->locale_module, &PyId_getpreferredencoding, "O", Py_False); - if (self->encoding == NULL) { - catch_ImportError: - /* - Importing locale can raise a ImportError because of - _functools, and locale.getpreferredencoding can raise a - ImportError if _locale is not available. These will happen - during module building. 
- */ - if (PyErr_ExceptionMatches(PyExc_ImportError)) { - PyErr_Clear(); - self->encoding = PyUnicode_FromString("ascii"); - } - else - goto error; - } - else if (!PyUnicode_Check(self->encoding)) - Py_CLEAR(self->encoding); - } + else if (!PyUnicode_Check(self->encoding)) + Py_CLEAR(self->encoding); } if (self->encoding != NULL) { encoding = _PyUnicode_AsString(self->encoding); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 21:14:58 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 21:14:58 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NTg5?= =?utf-8?q?=3A_fix_hyperlinking_of_type_slots_=28tp=5F*=29?= Message-ID: <3c5h5t6JZBz7Ljn@mail.python.org> http://hg.python.org/cpython/rev/a381721299a3 changeset: 84955:a381721299a3 branch: 3.3 parent: 84942:279c8c8e433d user: Antoine Pitrou date: Thu Aug 01 21:12:45 2013 +0200 summary: Issue #18589: fix hyperlinking of type slots (tp_*) files: Doc/c-api/allocation.rst | 6 +- Doc/c-api/exceptions.rst | 12 +- Doc/c-api/gcsupport.rst | 28 +- Doc/c-api/type.rst | 10 +- Doc/c-api/typeobj.rst | 218 ++++++++++++------------ Doc/extending/newtypes.rst | 94 +++++----- Doc/library/gc.rst | 4 +- Doc/library/stdtypes.rst | 6 +- Doc/whatsnew/2.2.rst | 6 +- 9 files changed, 192 insertions(+), 192 deletions(-) diff --git a/Doc/c-api/allocation.rst b/Doc/c-api/allocation.rst --- a/Doc/c-api/allocation.rst +++ b/Doc/c-api/allocation.rst @@ -32,7 +32,7 @@ Allocate a new Python object using the C structure type *TYPE* and the Python type object *type*. Fields not defined by the Python object header are not initialized; the object's reference count will be one. The size of - the memory allocation is determined from the :attr:`tp_basicsize` field of + the memory allocation is determined from the :c:member:`~PyTypeObject.tp_basicsize` field of the type object. @@ -41,7 +41,7 @@ Allocate a new Python object using the C structure type *TYPE* and the Python type object *type*. Fields not defined by the Python object header are not initialized. The allocated memory allows for the *TYPE* structure - plus *size* fields of the size given by the :attr:`tp_itemsize` field of + plus *size* fields of the size given by the :c:member:`~PyTypeObject.tp_itemsize` field of *type*. This is useful for implementing objects like tuples, which are able to determine their size at construction time. Embedding the array of fields into the same allocation decreases the number of allocations, @@ -52,7 +52,7 @@ Releases memory allocated to an object using :c:func:`PyObject_New` or :c:func:`PyObject_NewVar`. This is normally called from the - :attr:`tp_dealloc` handler specified in the object's type. The fields of + :c:member:`~PyTypeObject.tp_dealloc` handler specified in the object's type. The fields of the object should not be accessed after this call as the memory is no longer a valid Python object. diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -607,28 +607,28 @@ Ends a :c:func:`Py_EnterRecursiveCall`. Must be called once for each *successful* invocation of :c:func:`Py_EnterRecursiveCall`. -Properly implementing :attr:`tp_repr` for container types requires +Properly implementing :c:member:`~PyTypeObject.tp_repr` for container types requires special recursion handling. In addition to protecting the stack, -:attr:`tp_repr` also needs to track objects to prevent cycles. 
The +:c:member:`~PyTypeObject.tp_repr` also needs to track objects to prevent cycles. The following two functions facilitate this functionality. Effectively, these are the C equivalent to :func:`reprlib.recursive_repr`. .. c:function:: int Py_ReprEnter(PyObject *object) - Called at the beginning of the :attr:`tp_repr` implementation to + Called at the beginning of the :c:member:`~PyTypeObject.tp_repr` implementation to detect cycles. If the object has already been processed, the function returns a - positive integer. In that case the :attr:`tp_repr` implementation + positive integer. In that case the :c:member:`~PyTypeObject.tp_repr` implementation should return a string object indicating a cycle. As examples, :class:`dict` objects return ``{...}`` and :class:`list` objects return ``[...]``. The function will return a negative integer if the recursion limit - is reached. In that case the :attr:`tp_repr` implementation should + is reached. In that case the :c:member:`~PyTypeObject.tp_repr` implementation should typically return ``NULL``. - Otherwise, the function returns zero and the :attr:`tp_repr` + Otherwise, the function returns zero and the :c:member:`~PyTypeObject.tp_repr` implementation can continue normally. .. c:function:: void Py_ReprLeave(PyObject *object) diff --git a/Doc/c-api/gcsupport.rst b/Doc/c-api/gcsupport.rst --- a/Doc/c-api/gcsupport.rst +++ b/Doc/c-api/gcsupport.rst @@ -12,10 +12,10 @@ or strings), do not need to provide any explicit support for garbage collection. -To create a container type, the :attr:`tp_flags` field of the type object must +To create a container type, the :c:member:`~PyTypeObject.tp_flags` field of the type object must include the :const:`Py_TPFLAGS_HAVE_GC` and provide an implementation of the -:attr:`tp_traverse` handler. If instances of the type are mutable, a -:attr:`tp_clear` implementation must also be provided. +:c:member:`~PyTypeObject.tp_traverse` handler. If instances of the type are mutable, a +:c:member:`~PyTypeObject.tp_clear` implementation must also be provided. .. data:: Py_TPFLAGS_HAVE_GC @@ -57,7 +57,7 @@ Adds the object *op* to the set of container objects tracked by the collector. The collector can run at unexpected times so objects must be valid while being tracked. This should be called once all the fields - followed by the :attr:`tp_traverse` handler become valid, usually near the + followed by the :c:member:`~PyTypeObject.tp_traverse` handler become valid, usually near the end of the constructor. @@ -86,8 +86,8 @@ Remove the object *op* from the set of container objects tracked by the collector. Note that :c:func:`PyObject_GC_Track` can be called again on this object to add it back to the set of tracked objects. The deallocator - (:attr:`tp_dealloc` handler) should call this for the object before any of - the fields used by the :attr:`tp_traverse` handler become invalid. + (:c:member:`~PyTypeObject.tp_dealloc` handler) should call this for the object before any of + the fields used by the :c:member:`~PyTypeObject.tp_traverse` handler become invalid. .. c:function:: void _PyObject_GC_UNTRACK(PyObject *op) @@ -95,19 +95,19 @@ A macro version of :c:func:`PyObject_GC_UnTrack`. It should not be used for extension modules. -The :attr:`tp_traverse` handler accepts a function parameter of this type: +The :c:member:`~PyTypeObject.tp_traverse` handler accepts a function parameter of this type: .. c:type:: int (*visitproc)(PyObject *object, void *arg) - Type of the visitor function passed to the :attr:`tp_traverse` handler. 
+ Type of the visitor function passed to the :c:member:`~PyTypeObject.tp_traverse` handler. The function should be called with an object to traverse as *object* and - the third parameter to the :attr:`tp_traverse` handler as *arg*. The + the third parameter to the :c:member:`~PyTypeObject.tp_traverse` handler as *arg*. The Python core uses several visitor functions to implement cyclic garbage detection; it's not expected that users will need to write their own visitor functions. -The :attr:`tp_traverse` handler must have the following type: +The :c:member:`~PyTypeObject.tp_traverse` handler must have the following type: .. c:type:: int (*traverseproc)(PyObject *self, visitproc visit, void *arg) @@ -119,15 +119,15 @@ object argument. If *visit* returns a non-zero value that value should be returned immediately. -To simplify writing :attr:`tp_traverse` handlers, a :c:func:`Py_VISIT` macro is -provided. In order to use this macro, the :attr:`tp_traverse` implementation +To simplify writing :c:member:`~PyTypeObject.tp_traverse` handlers, a :c:func:`Py_VISIT` macro is +provided. In order to use this macro, the :c:member:`~PyTypeObject.tp_traverse` implementation must name its arguments exactly *visit* and *arg*: .. c:function:: void Py_VISIT(PyObject *o) Call the *visit* callback, with arguments *o* and *arg*. If *visit* returns - a non-zero value, then return it. Using this macro, :attr:`tp_traverse` + a non-zero value, then return it. Using this macro, :c:member:`~PyTypeObject.tp_traverse` handlers look like:: static int @@ -138,7 +138,7 @@ return 0; } -The :attr:`tp_clear` handler must be of the :c:type:`inquiry` type, or *NULL* +The :c:member:`~PyTypeObject.tp_clear` handler must be of the :c:type:`inquiry` type, or *NULL* if the object is immutable. diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -37,10 +37,10 @@ .. c:function:: long PyType_GetFlags(PyTypeObject* type) - Return the :attr:`tp_flags` member of *type*. This function is primarily + Return the :c:member:`~PyTypeObject.tp_flags` member of *type*. This function is primarily meant for use with `Py_LIMITED_API`; the individual flag bits are guaranteed to be stable across Python releases, but access to - :attr:`tp_flags` itself is not part of the limited API. + :c:member:`~PyTypeObject.tp_flags` itself is not part of the limited API. .. versionadded:: 3.2 @@ -70,14 +70,14 @@ .. c:function:: PyObject* PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems) - Generic handler for the :attr:`tp_alloc` slot of a type object. Use + Generic handler for the :c:member:`~PyTypeObject.tp_alloc` slot of a type object. Use Python's default memory allocation mechanism to allocate a new instance and initialize all its contents to *NULL*. .. c:function:: PyObject* PyType_GenericNew(PyTypeObject *type, PyObject *args, PyObject *kwds) - Generic handler for the :attr:`tp_new` slot of a type object. Create a - new instance using the type's :attr:`tp_alloc` slot. + Generic handler for the :c:member:`~PyTypeObject.tp_new` slot of a type object. Create a + new instance using the type's :c:member:`~PyTypeObject.tp_alloc` slot. .. c:function:: int PyType_Ready(PyTypeObject *type) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -35,7 +35,7 @@ The type object structure extends the :c:type:`PyVarObject` structure. 
The :attr:`ob_size` field is used for dynamic types (created by :func:`type_new`, usually called from a class statement). Note that :c:data:`PyType_Type` (the -metatype) initializes :attr:`tp_itemsize`, which means that its instances (i.e. +metatype) initializes :c:member:`~PyTypeObject.tp_itemsize`, which means that its instances (i.e. type objects) *must* have the :attr:`ob_size` field. @@ -102,7 +102,7 @@ should be just the type name. If the module is a submodule of a package, the full package name is part of the full module name. For example, a type named :class:`T` defined in module :mod:`M` in subpackage :mod:`Q` in package :mod:`P` - should have the :attr:`tp_name` initializer ``"P.Q.M.T"``. + should have the :c:member:`~PyTypeObject.tp_name` initializer ``"P.Q.M.T"``. For dynamically allocated type objects, this should just be the type name, and the module name explicitly stored in the type dict as the value for key @@ -113,7 +113,7 @@ attribute, and everything after the last dot is made accessible as the :attr:`__name__` attribute. - If no dot is present, the entire :attr:`tp_name` field is made accessible as the + If no dot is present, the entire :c:member:`~PyTypeObject.tp_name` field is made accessible as the :attr:`__name__` attribute, and the :attr:`__module__` attribute is undefined (unless explicitly set in the dictionary, as explained above). This means your type will be impossible to pickle. @@ -127,13 +127,13 @@ These fields allow calculating the size in bytes of instances of the type. There are two kinds of types: types with fixed-length instances have a zero - :attr:`tp_itemsize` field, types with variable-length instances have a non-zero - :attr:`tp_itemsize` field. For a type with fixed-length instances, all - instances have the same size, given in :attr:`tp_basicsize`. + :c:member:`~PyTypeObject.tp_itemsize` field, types with variable-length instances have a non-zero + :c:member:`~PyTypeObject.tp_itemsize` field. For a type with fixed-length instances, all + instances have the same size, given in :c:member:`~PyTypeObject.tp_basicsize`. For a type with variable-length instances, the instances must have an - :attr:`ob_size` field, and the instance size is :attr:`tp_basicsize` plus N - times :attr:`tp_itemsize`, where N is the "length" of the object. The value of + :attr:`ob_size` field, and the instance size is :c:member:`~PyTypeObject.tp_basicsize` plus N + times :c:member:`~PyTypeObject.tp_itemsize`, where N is the "length" of the object. The value of N is typically stored in the instance's :attr:`ob_size` field. There are exceptions: for example, ints use a negative :attr:`ob_size` to indicate a negative number, and N is ``abs(ob_size)`` there. Also, the presence of an @@ -146,20 +146,20 @@ :c:macro:`PyObject_HEAD` or :c:macro:`PyObject_VAR_HEAD` (whichever is used to declare the instance struct) and this in turn includes the :attr:`_ob_prev` and :attr:`_ob_next` fields if they are present. This means that the only correct - way to get an initializer for the :attr:`tp_basicsize` is to use the + way to get an initializer for the :c:member:`~PyTypeObject.tp_basicsize` is to use the ``sizeof`` operator on the struct used to declare the instance layout. The basic size does not include the GC header size. These fields are inherited separately by subtypes. 
If the base type has a - non-zero :attr:`tp_itemsize`, it is generally not safe to set - :attr:`tp_itemsize` to a different non-zero value in a subtype (though this + non-zero :c:member:`~PyTypeObject.tp_itemsize`, it is generally not safe to set + :c:member:`~PyTypeObject.tp_itemsize` to a different non-zero value in a subtype (though this depends on the implementation of the base type). A note about alignment: if the variable items require a particular alignment, - this should be taken care of by the value of :attr:`tp_basicsize`. Example: - suppose a type implements an array of ``double``. :attr:`tp_itemsize` is + this should be taken care of by the value of :c:member:`~PyTypeObject.tp_basicsize`. Example: + suppose a type implements an array of ``double``. :c:member:`~PyTypeObject.tp_itemsize` is ``sizeof(double)``. It is the programmer's responsibility that - :attr:`tp_basicsize` is a multiple of ``sizeof(double)`` (assuming this is the + :c:member:`~PyTypeObject.tp_basicsize` is a multiple of ``sizeof(double)`` (assuming this is the alignment requirement for ``double``). @@ -175,10 +175,10 @@ destructor function should free all references which the instance owns, free all memory buffers owned by the instance (using the freeing function corresponding to the allocation function used to allocate the buffer), and finally (as its - last action) call the type's :attr:`tp_free` function. If the type is not + last action) call the type's :c:member:`~PyTypeObject.tp_free` function. If the type is not subtypable (doesn't have the :const:`Py_TPFLAGS_BASETYPE` flag bit set), it is permissible to call the object deallocator directly instead of via - :attr:`tp_free`. The object deallocator should be the one used to allocate the + :c:member:`~PyTypeObject.tp_free`. The object deallocator should be the one used to allocate the instance; this is normally :c:func:`PyObject_Del` if the instance was allocated using :c:func:`PyObject_New` or :c:func:`PyObject_VarNew`, or :c:func:`PyObject_GC_Del` if the instance was allocated using @@ -193,25 +193,25 @@ The print function is only called when the instance is printed to a *real* file; when it is printed to a pseudo-file (like a :class:`StringIO` instance), the - instance's :attr:`tp_repr` or :attr:`tp_str` function is called to convert it to - a string. These are also called when the type's :attr:`tp_print` field is - *NULL*. A type should never implement :attr:`tp_print` in a way that produces - different output than :attr:`tp_repr` or :attr:`tp_str` would. + instance's :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` function is called to convert it to + a string. These are also called when the type's :c:member:`~PyTypeObject.tp_print` field is + *NULL*. A type should never implement :c:member:`~PyTypeObject.tp_print` in a way that produces + different output than :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` would. The print function is called with the same signature as :c:func:`PyObject_Print`: ``int tp_print(PyObject *self, FILE *file, int flags)``. The *self* argument is the instance to be printed. The *file* argument is the stdio file to which it is to be printed. The *flags* argument is composed of flag bits. The only flag bit currently defined is :const:`Py_PRINT_RAW`. 
When the :const:`Py_PRINT_RAW` - flag bit is set, the instance should be printed the same way as :attr:`tp_str` + flag bit is set, the instance should be printed the same way as :c:member:`~PyTypeObject.tp_str` would format it; when the :const:`Py_PRINT_RAW` flag bit is clear, the instance - should be printed the same was as :attr:`tp_repr` would format it. It should + should be printed the same was as :c:member:`~PyTypeObject.tp_repr` would format it. It should return ``-1`` and set an exception condition when an error occurred during the comparison. - It is possible that the :attr:`tp_print` field will be deprecated. In any case, - it is recommended not to define :attr:`tp_print`, but instead to rely on - :attr:`tp_repr` and :attr:`tp_str` for printing. + It is possible that the :c:member:`~PyTypeObject.tp_print` field will be deprecated. In any case, + it is recommended not to define :c:member:`~PyTypeObject.tp_print`, but instead to rely on + :c:member:`~PyTypeObject.tp_repr` and :c:member:`~PyTypeObject.tp_str` for printing. This field is inherited by subtypes. @@ -221,13 +221,13 @@ An optional pointer to the get-attribute-string function. This field is deprecated. When it is defined, it should point to a function - that acts the same as the :attr:`tp_getattro` function, but taking a C string + that acts the same as the :c:member:`~PyTypeObject.tp_getattro` function, but taking a C string instead of a Python string object to give the attribute name. The signature is the same as for :c:func:`PyObject_GetAttrString`. - This field is inherited by subtypes together with :attr:`tp_getattro`: a subtype - inherits both :attr:`tp_getattr` and :attr:`tp_getattro` from its base type when - the subtype's :attr:`tp_getattr` and :attr:`tp_getattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_getattro`: a subtype + inherits both :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` are both *NULL*. .. c:member:: setattrfunc PyTypeObject.tp_setattr @@ -235,13 +235,13 @@ An optional pointer to the set-attribute-string function. This field is deprecated. When it is defined, it should point to a function - that acts the same as the :attr:`tp_setattro` function, but taking a C string + that acts the same as the :c:member:`~PyTypeObject.tp_setattro` function, but taking a C string instead of a Python string object to give the attribute name. The signature is the same as for :c:func:`PyObject_SetAttrString`. - This field is inherited by subtypes together with :attr:`tp_setattro`: a subtype - inherits both :attr:`tp_setattr` and :attr:`tp_setattro` from its base type when - the subtype's :attr:`tp_setattr` and :attr:`tp_setattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_setattro`: a subtype + inherits both :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` are both *NULL*. .. c:member:: void* PyTypeObject.tp_reserved @@ -275,7 +275,7 @@ objects which implement the number protocol. These fields are documented in :ref:`number-structs`. 
- The :attr:`tp_as_number` field is not inherited, but the contained fields are + The :c:member:`~PyTypeObject.tp_as_number` field is not inherited, but the contained fields are inherited individually. @@ -285,7 +285,7 @@ objects which implement the sequence protocol. These fields are documented in :ref:`sequence-structs`. - The :attr:`tp_as_sequence` field is not inherited, but the contained fields + The :c:member:`~PyTypeObject.tp_as_sequence` field is not inherited, but the contained fields are inherited individually. @@ -295,7 +295,7 @@ objects which implement the mapping protocol. These fields are documented in :ref:`mapping-structs`. - The :attr:`tp_as_mapping` field is not inherited, but the contained fields + The :c:member:`~PyTypeObject.tp_as_mapping` field is not inherited, but the contained fields are inherited individually. @@ -323,9 +323,9 @@ object raises :exc:`TypeError`. This field is inherited by subtypes together with - :attr:`tp_richcompare`: a subtype inherits both of - :attr:`tp_richcompare` and :attr:`tp_hash`, when the subtype's - :attr:`tp_richcompare` and :attr:`tp_hash` are both *NULL*. + :c:member:`~PyTypeObject.tp_richcompare`: a subtype inherits both of + :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash`, when the subtype's + :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` are both *NULL*. .. c:member:: ternaryfunc PyTypeObject.tp_call @@ -363,9 +363,9 @@ convenient to set this field to :c:func:`PyObject_GenericGetAttr`, which implements the normal way of looking for object attributes. - This field is inherited by subtypes together with :attr:`tp_getattr`: a subtype - inherits both :attr:`tp_getattr` and :attr:`tp_getattro` from its base type when - the subtype's :attr:`tp_getattr` and :attr:`tp_getattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_getattr`: a subtype + inherits both :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` are both *NULL*. .. c:member:: setattrofunc PyTypeObject.tp_setattro @@ -376,9 +376,9 @@ convenient to set this field to :c:func:`PyObject_GenericSetAttr`, which implements the normal way of setting object attributes. - This field is inherited by subtypes together with :attr:`tp_setattr`: a subtype - inherits both :attr:`tp_setattr` and :attr:`tp_setattro` from its base type when - the subtype's :attr:`tp_setattr` and :attr:`tp_setattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_setattr`: a subtype + inherits both :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` are both *NULL*. .. c:member:: PyBufferProcs* PyTypeObject.tp_as_buffer @@ -387,7 +387,7 @@ which implement the buffer interface. These fields are documented in :ref:`buffer-structs`. - The :attr:`tp_as_buffer` field is not inherited, but the contained fields are + The :c:member:`~PyTypeObject.tp_as_buffer` field is not inherited, but the contained fields are inherited individually. @@ -396,8 +396,8 @@ This field is a bit mask of various flags. 
Some flags indicate variant semantics for certain situations; others are used to indicate that certain fields in the type object (or in the extension structures referenced via - :attr:`tp_as_number`, :attr:`tp_as_sequence`, :attr:`tp_as_mapping`, and - :attr:`tp_as_buffer`) that were historically not always present are valid; if + :c:member:`~PyTypeObject.tp_as_number`, :c:member:`~PyTypeObject.tp_as_sequence`, :c:member:`~PyTypeObject.tp_as_mapping`, and + :c:member:`~PyTypeObject.tp_as_buffer`) that were historically not always present are valid; if such a flag bit is clear, the type fields it guards must not be accessed and must be considered to have a zero or *NULL* value instead. @@ -407,13 +407,13 @@ inherited if the extension structure is inherited, i.e. the base type's value of the flag bit is copied into the subtype together with a pointer to the extension structure. The :const:`Py_TPFLAGS_HAVE_GC` flag bit is inherited together with - the :attr:`tp_traverse` and :attr:`tp_clear` fields, i.e. if the + the :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields, i.e. if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is clear in the subtype and the - :attr:`tp_traverse` and :attr:`tp_clear` fields in the subtype exist and have + :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields in the subtype exist and have *NULL* values. The following bit masks are currently defined; these can be ORed together using - the ``|`` operator to form the value of the :attr:`tp_flags` field. The macro + the ``|`` operator to form the value of the :c:member:`~PyTypeObject.tp_flags` field. The macro :c:func:`PyType_HasFeature` takes a type and a flags value, *tp* and *f*, and checks whether ``tp->tp_flags & f`` is non-zero. @@ -453,7 +453,7 @@ is set, instances must be created using :c:func:`PyObject_GC_New` and destroyed using :c:func:`PyObject_GC_Del`. More information in section :ref:`supporting-cycle-detection`. This bit also implies that the - GC-related fields :attr:`tp_traverse` and :attr:`tp_clear` are present in + GC-related fields :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` are present in the type object. @@ -481,8 +481,8 @@ about Python's garbage collection scheme can be found in section :ref:`supporting-cycle-detection`. - The :attr:`tp_traverse` pointer is used by the garbage collector to detect - reference cycles. A typical implementation of a :attr:`tp_traverse` function + The :c:member:`~PyTypeObject.tp_traverse` pointer is used by the garbage collector to detect + reference cycles. A typical implementation of a :c:member:`~PyTypeObject.tp_traverse` function simply calls :c:func:`Py_VISIT` on each of the instance's members that are Python objects. For example, this is function :c:func:`local_traverse` from the :mod:`_thread` extension module:: @@ -508,9 +508,9 @@ :c:func:`local_traverse` to have these specific names; don't name them just anything. - This field is inherited by subtypes together with :attr:`tp_clear` and the - :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :attr:`tp_traverse`, and - :attr:`tp_clear` are all inherited from the base type if they are all zero in + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_clear` and the + :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and + :c:member:`~PyTypeObject.tp_clear` are all inherited from the base type if they are all zero in the subtype. 
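In practice the tp_traverse/tp_clear rules described above reduce to a small amount of boilerplate. As an illustration only -- a hypothetical container type holding two Python objects, not code from this patch -- the paired handlers typically look like this, with tp_dealloc untracking the instance and reusing tp_clear:

    #include <Python.h>

    /* A hypothetical container holding two Python objects. */
    typedef struct {
        PyObject_HEAD
        PyObject *first;
        PyObject *second;
    } PairObject;

    /* tp_traverse: report every PyObject* the instance owns. */
    static int
    Pair_traverse(PairObject *self, visitproc visit, void *arg)
    {
        Py_VISIT(self->first);
        Py_VISIT(self->second);
        return 0;
    }

    /* tp_clear: drop those references to break reference cycles. */
    static int
    Pair_clear(PairObject *self)
    {
        Py_CLEAR(self->first);
        Py_CLEAR(self->second);
        return 0;
    }

    /* tp_dealloc: untrack first, then reuse tp_clear to release members. */
    static void
    Pair_dealloc(PairObject *self)
    {
        PyObject_GC_UnTrack(self);
        Pair_clear(self);
        Py_TYPE(self)->tp_free((PyObject *)self);
    }

The corresponding type object would set Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC in tp_flags and point the tp_traverse, tp_clear and tp_dealloc slots at these functions (with the usual traverseproc/inquiry/destructor casts).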
@@ -519,17 +519,17 @@ An optional pointer to a clear function for the garbage collector. This is only used if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is set. - The :attr:`tp_clear` member function is used to break reference cycles in cyclic - garbage detected by the garbage collector. Taken together, all :attr:`tp_clear` + The :c:member:`~PyTypeObject.tp_clear` member function is used to break reference cycles in cyclic + garbage detected by the garbage collector. Taken together, all :c:member:`~PyTypeObject.tp_clear` functions in the system must combine to break all reference cycles. This is - subtle, and if in any doubt supply a :attr:`tp_clear` function. For example, - the tuple type does not implement a :attr:`tp_clear` function, because it's + subtle, and if in any doubt supply a :c:member:`~PyTypeObject.tp_clear` function. For example, + the tuple type does not implement a :c:member:`~PyTypeObject.tp_clear` function, because it's possible to prove that no reference cycle can be composed entirely of tuples. - Therefore the :attr:`tp_clear` functions of other types must be sufficient to + Therefore the :c:member:`~PyTypeObject.tp_clear` functions of other types must be sufficient to break any cycle containing a tuple. This isn't immediately obvious, and there's - rarely a good reason to avoid implementing :attr:`tp_clear`. + rarely a good reason to avoid implementing :c:member:`~PyTypeObject.tp_clear`. - Implementations of :attr:`tp_clear` should drop the instance's references to + Implementations of :c:member:`~PyTypeObject.tp_clear` should drop the instance's references to those of its members that may be Python objects, and set its pointers to those members to *NULL*, as in the following example:: @@ -554,18 +554,18 @@ so that *self* knows the contained object can no longer be used. The :c:func:`Py_CLEAR` macro performs the operations in a safe order. - Because the goal of :attr:`tp_clear` functions is to break reference cycles, + Because the goal of :c:member:`~PyTypeObject.tp_clear` functions is to break reference cycles, it's not necessary to clear contained objects like Python strings or Python integers, which can't participate in reference cycles. On the other hand, it may be convenient to clear all contained Python objects, and write the type's - :attr:`tp_dealloc` function to invoke :attr:`tp_clear`. + :c:member:`~PyTypeObject.tp_dealloc` function to invoke :c:member:`~PyTypeObject.tp_clear`. More information about Python's garbage collection scheme can be found in section :ref:`supporting-cycle-detection`. - This field is inherited by subtypes together with :attr:`tp_traverse` and the - :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :attr:`tp_traverse`, and - :attr:`tp_clear` are all inherited from the base type if they are all zero in + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_traverse` and the + :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and + :c:member:`~PyTypeObject.tp_clear` are all inherited from the base type if they are all zero in the subtype. @@ -585,13 +585,13 @@ comparisons makes sense (e.g. ``==`` and ``!=``, but not ``<`` and friends), directly raise :exc:`TypeError` in the rich comparison function. 
- This field is inherited by subtypes together with :attr:`tp_hash`: - a subtype inherits :attr:`tp_richcompare` and :attr:`tp_hash` when - the subtype's :attr:`tp_richcompare` and :attr:`tp_hash` are both + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_hash`: + a subtype inherits :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` when + the subtype's :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` are both *NULL*. The following constants are defined to be used as the third argument for - :attr:`tp_richcompare` and for :c:func:`PyObject_RichCompare`: + :c:member:`~PyTypeObject.tp_richcompare` and for :c:func:`PyObject_RichCompare`: +----------------+------------+ | Constant | Comparison | @@ -619,26 +619,26 @@ instance structure needs to include a field of type :c:type:`PyObject\*` which is initialized to *NULL*. - Do not confuse this field with :attr:`tp_weaklist`; that is the list head for + Do not confuse this field with :c:member:`~PyTypeObject.tp_weaklist`; that is the list head for weak references to the type object itself. This field is inherited by subtypes, but see the rules listed below. A subtype may override this offset; this means that the subtype uses a different weak reference list head than the base type. Since the list head is always found via - :attr:`tp_weaklistoffset`, this should not be a problem. + :c:member:`~PyTypeObject.tp_weaklistoffset`, this should not be a problem. When a type defined by a class statement has no :attr:`__slots__` declaration, and none of its base types are weakly referenceable, the type is made weakly referenceable by adding a weak reference list head slot to the instance layout - and setting the :attr:`tp_weaklistoffset` of that slot's offset. + and setting the :c:member:`~PyTypeObject.tp_weaklistoffset` of that slot's offset. When a type's :attr:`__slots__` declaration contains a slot named :attr:`__weakref__`, that slot becomes the weak reference list head for instances of the type, and the slot's offset is stored in the type's - :attr:`tp_weaklistoffset`. + :c:member:`~PyTypeObject.tp_weaklistoffset`. When a type's :attr:`__slots__` declaration does not contain a slot named - :attr:`__weakref__`, the type inherits its :attr:`tp_weaklistoffset` from its + :attr:`__weakref__`, the type inherits its :c:member:`~PyTypeObject.tp_weaklistoffset` from its base type. .. c:member:: getiterfunc PyTypeObject.tp_iter @@ -660,7 +660,7 @@ *NULL* too. Its presence signals that the instances of this type are iterators. - Iterator types should also define the :attr:`tp_iter` function, and that + Iterator types should also define the :c:member:`~PyTypeObject.tp_iter` function, and that function should return the iterator instance itself (not a new iterator instance). @@ -675,7 +675,7 @@ structures, declaring regular methods of this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a method descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a method descriptor. This field is not inherited by subtypes (methods are inherited through a different mechanism). @@ -688,7 +688,7 @@ this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a member descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a member descriptor. This field is not inherited by subtypes (members are inherited through a different mechanism). 
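As a concrete illustration of the tp_members table just described -- assuming the hypothetical PairObject struct from the earlier sketch, with structmember.h providing PyMemberDef and the T_*/flag constants:

    #include <Python.h>
    #include <structmember.h>   /* PyMemberDef, T_OBJECT_EX, READONLY */

    /* Each entry becomes a member descriptor in the type's tp_dict. */
    static PyMemberDef Pair_members[] = {
        {"first",  T_OBJECT_EX, offsetof(PairObject, first),  READONLY,
         "first item of the pair"},
        {"second", T_OBJECT_EX, offsetof(PairObject, second), READONLY,
         "second item of the pair"},
        {NULL}                          /* sentinel */
    };

Pointing the type's tp_members slot at this array is all that is needed; PyType_Ready turns each entry into a member descriptor in the type's tp_dict.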
@@ -700,7 +700,7 @@ structures, declaring computed attributes of instances of this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a getset descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a getset descriptor. This field is not inherited by subtypes (computed attributes are inherited through a different mechanism). @@ -748,7 +748,7 @@ .. warning:: It is not safe to use :c:func:`PyDict_SetItem` on or otherwise modify - :attr:`tp_dict` with the dictionary C-API. + :c:member:`~PyTypeObject.tp_dict` with the dictionary C-API. .. c:member:: descrgetfunc PyTypeObject.tp_descr_get @@ -784,7 +784,7 @@ the instance variable dictionary; this offset is used by :c:func:`PyObject_GenericGetAttr`. - Do not confuse this field with :attr:`tp_dict`; that is the dictionary for + Do not confuse this field with :c:member:`~PyTypeObject.tp_dict`; that is the dictionary for attributes of the type object itself. If the value of this field is greater than zero, it specifies the offset from @@ -793,20 +793,20 @@ offset is more expensive to use, and should only be used when the instance structure contains a variable-length part. This is used for example to add an instance variable dictionary to subtypes of :class:`str` or :class:`tuple`. Note - that the :attr:`tp_basicsize` field should account for the dictionary added to + that the :c:member:`~PyTypeObject.tp_basicsize` field should account for the dictionary added to the end in that case, even though the dictionary is not included in the basic object layout. On a system with a pointer size of 4 bytes, - :attr:`tp_dictoffset` should be set to ``-4`` to indicate that the dictionary is + :c:member:`~PyTypeObject.tp_dictoffset` should be set to ``-4`` to indicate that the dictionary is at the very end of the structure. The real dictionary offset in an instance can be computed from a negative - :attr:`tp_dictoffset` as follows:: + :c:member:`~PyTypeObject.tp_dictoffset` as follows:: dictoffset = tp_basicsize + abs(ob_size)*tp_itemsize + tp_dictoffset if dictoffset is not aligned on sizeof(void*): round up to sizeof(void*) - where :attr:`tp_basicsize`, :attr:`tp_itemsize` and :attr:`tp_dictoffset` are + where :c:member:`~PyTypeObject.tp_basicsize`, :c:member:`~PyTypeObject.tp_itemsize` and :c:member:`~PyTypeObject.tp_dictoffset` are taken from the type object, and :attr:`ob_size` is taken from the instance. The absolute value is taken because ints use the sign of :attr:`ob_size` to store the sign of the number. (There's never a need to do this calculation @@ -815,15 +815,15 @@ This field is inherited by subtypes, but see the rules listed below. A subtype may override this offset; this means that the subtype instances store the dictionary at a difference offset than the base type. Since the dictionary is - always found via :attr:`tp_dictoffset`, this should not be a problem. + always found via :c:member:`~PyTypeObject.tp_dictoffset`, this should not be a problem. When a type defined by a class statement has no :attr:`__slots__` declaration, and none of its base types has an instance variable dictionary, a dictionary - slot is added to the instance layout and the :attr:`tp_dictoffset` is set to + slot is added to the instance layout and the :c:member:`~PyTypeObject.tp_dictoffset` is set to that slot's offset. When a type defined by a class statement has a :attr:`__slots__` declaration, - the type inherits its :attr:`tp_dictoffset` from its base type. 
+ the type inherits its :c:member:`~PyTypeObject.tp_dictoffset` from its base type. (Adding a slot named :attr:`__dict__` to the :attr:`__slots__` declaration does not have the expected effect, it just causes confusion. Maybe this should be @@ -847,12 +847,12 @@ arguments represent positional and keyword arguments of the call to :meth:`__init__`. - The :attr:`tp_init` function, if not *NULL*, is called when an instance is - created normally by calling its type, after the type's :attr:`tp_new` function - has returned an instance of the type. If the :attr:`tp_new` function returns an + The :c:member:`~PyTypeObject.tp_init` function, if not *NULL*, is called when an instance is + created normally by calling its type, after the type's :c:member:`~PyTypeObject.tp_new` function + has returned an instance of the type. If the :c:member:`~PyTypeObject.tp_new` function returns an instance of some other type that is not a subtype of the original type, no - :attr:`tp_init` function is called; if :attr:`tp_new` returns an instance of a - subtype of the original type, the subtype's :attr:`tp_init` is called. + :c:member:`~PyTypeObject.tp_init` function is called; if :c:member:`~PyTypeObject.tp_new` returns an instance of a + subtype of the original type, the subtype's :c:member:`~PyTypeObject.tp_init` is called. This field is inherited by subtypes. @@ -869,14 +869,14 @@ initialization. It should return a pointer to a block of memory of adequate length for the instance, suitably aligned, and initialized to zeros, but with :attr:`ob_refcnt` set to ``1`` and :attr:`ob_type` set to the type argument. If - the type's :attr:`tp_itemsize` is non-zero, the object's :attr:`ob_size` field + the type's :c:member:`~PyTypeObject.tp_itemsize` is non-zero, the object's :attr:`ob_size` field should be initialized to *nitems* and the length of the allocated memory block should be ``tp_basicsize + nitems*tp_itemsize``, rounded up to a multiple of ``sizeof(void*)``; otherwise, *nitems* is not used and the length of the block - should be :attr:`tp_basicsize`. + should be :c:member:`~PyTypeObject.tp_basicsize`. Do not use this function to do any other instance initialization, not even to - allocate additional memory; that should be done by :attr:`tp_new`. + allocate additional memory; that should be done by :c:member:`~PyTypeObject.tp_new`. This field is inherited by static subtypes, but not by dynamic subtypes (subtypes created by a class statement); in the latter, this field is always set @@ -898,20 +898,20 @@ The subtype argument is the type of the object being created; the *args* and *kwds* arguments represent positional and keyword arguments of the call to the - type. Note that subtype doesn't have to equal the type whose :attr:`tp_new` + type. Note that subtype doesn't have to equal the type whose :c:member:`~PyTypeObject.tp_new` function is called; it may be a subtype of that type (but not an unrelated type). - The :attr:`tp_new` function should call ``subtype->tp_alloc(subtype, nitems)`` + The :c:member:`~PyTypeObject.tp_new` function should call ``subtype->tp_alloc(subtype, nitems)`` to allocate space for the object, and then do only as much further initialization as is absolutely necessary. Initialization that can safely be - ignored or repeated should be placed in the :attr:`tp_init` handler. A good + ignored or repeated should be placed in the :c:member:`~PyTypeObject.tp_init` handler. 
A good rule of thumb is that for immutable types, all initialization should take place - in :attr:`tp_new`, while for mutable types, most initialization should be - deferred to :attr:`tp_init`. + in :c:member:`~PyTypeObject.tp_new`, while for mutable types, most initialization should be + deferred to :c:member:`~PyTypeObject.tp_init`. This field is inherited by subtypes, except it is not inherited by static types - whose :attr:`tp_base` is *NULL* or ``&PyBaseObject_Type``. + whose :c:member:`~PyTypeObject.tp_base` is *NULL* or ``&PyBaseObject_Type``. .. c:member:: destructor PyTypeObject.tp_free @@ -935,7 +935,7 @@ The garbage collector needs to know whether a particular object is collectible or not. Normally, it is sufficient to look at the object's type's - :attr:`tp_flags` field, and check the :const:`Py_TPFLAGS_HAVE_GC` flag bit. But + :c:member:`~PyTypeObject.tp_flags` field, and check the :const:`Py_TPFLAGS_HAVE_GC` flag bit. But some types have a mixture of statically and dynamically allocated instances, and the statically allocated instances are not collectible. Such types should define this function; it should return ``1`` for a collectible instance, and @@ -1006,7 +1006,7 @@ .. c:member:: PyTypeObject* PyTypeObject.tp_next - Pointer to the next type object with a non-zero :attr:`tp_allocs` field. + Pointer to the next type object with a non-zero :c:member:`~PyTypeObject.tp_allocs` field. Also, note that, in a garbage collected Python, tp_dealloc may be called from any Python thread, not just the thread which created the object (if the object @@ -1145,13 +1145,13 @@ This function is used by :c:func:`PySequence_Concat` and has the same signature. It is also used by the ``+`` operator, after trying the numeric - addition via the :attr:`tp_as_number.nb_add` slot. + addition via the :c:member:`~PyTypeObject.tp_as_number.nb_add` slot. .. c:member:: ssizeargfunc PySequenceMethods.sq_repeat This function is used by :c:func:`PySequence_Repeat` and has the same signature. It is also used by the ``*`` operator, after trying numeric - multiplication via the :attr:`tp_as_number.nb_mul` slot. + multiplication via the :c:member:`~PyTypeObject.tp_as_number.nb_mul` slot. .. c:member:: ssizeargfunc PySequenceMethods.sq_item diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -135,11 +135,11 @@ .. note:: If you want your type to be subclassable from Python, and your type has the same - :attr:`tp_basicsize` as its base type, you may have problems with multiple + :c:member:`~PyTypeObject.tp_basicsize` as its base type, you may have problems with multiple inheritance. A Python subclass of your type will have to list your type first in its :attr:`__bases__`, or else it will not be able to call your type's :meth:`__new__` method without getting an error. You can avoid this problem by - ensuring that your type has a larger value for :attr:`tp_basicsize` than its + ensuring that your type has a larger value for :c:member:`~PyTypeObject.tp_basicsize` than its base type does. Most of the time, this will be true anyway, because either your base type will be :class:`object`, or else you will be adding data members to your base type, and therefore increasing its size. @@ -159,7 +159,7 @@ All types should include this constant in their flags. It enables all of the members defined by the current version of Python. -We provide a doc string for the type in :attr:`tp_doc`. 
:: +We provide a doc string for the type in :c:member:`~PyTypeObject.tp_doc`. :: "Noddy objects", /* tp_doc */ @@ -168,12 +168,12 @@ the module. We'll expand this example later to have more interesting behavior. For now, all we want to be able to do is to create new :class:`Noddy` objects. -To enable object creation, we have to provide a :attr:`tp_new` implementation. +To enable object creation, we have to provide a :c:member:`~PyTypeObject.tp_new` implementation. In this case, we can just use the default implementation provided by the API function :c:func:`PyType_GenericNew`. We'd like to just assign this to the -:attr:`tp_new` slot, but we can't, for portability sake, On some platforms or +:c:member:`~PyTypeObject.tp_new` slot, but we can't, for portability sake, On some platforms or compilers, we can't statically initialize a structure member with a function -defined in another C module, so, instead, we'll assign the :attr:`tp_new` slot +defined in another C module, so, instead, we'll assign the :c:member:`~PyTypeObject.tp_new` slot in the module initialization function just before calling :c:func:`PyType_Ready`:: @@ -268,13 +268,13 @@ Py_TYPE(self)->tp_free((PyObject*)self); } -which is assigned to the :attr:`tp_dealloc` member:: +which is assigned to the :c:member:`~PyTypeObject.tp_dealloc` member:: (destructor)Noddy_dealloc, /*tp_dealloc*/ This method decrements the reference counts of the two Python attributes. We use :c:func:`Py_XDECREF` here because the :attr:`first` and :attr:`last` members -could be *NULL*. It then calls the :attr:`tp_free` member of the object's type +could be *NULL*. It then calls the :c:member:`~PyTypeObject.tp_free` member of the object's type to free the object's memory. Note that the object's type might not be :class:`NoddyType`, because the object may be an instance of a subclass. @@ -306,7 +306,7 @@ return (PyObject *)self; } -and install it in the :attr:`tp_new` member:: +and install it in the :c:member:`~PyTypeObject.tp_new` member:: Noddy_new, /* tp_new */ @@ -326,17 +326,17 @@ created. New methods always accept positional and keyword arguments, but they often ignore the arguments, leaving the argument handling to initializer methods. Note that if the type supports subclassing, the type passed may not be -the type being defined. The new method calls the :attr:`tp_alloc` slot to -allocate memory. We don't fill the :attr:`tp_alloc` slot ourselves. Rather +the type being defined. The new method calls the :c:member:`~PyTypeObject.tp_alloc` slot to +allocate memory. We don't fill the :c:member:`~PyTypeObject.tp_alloc` slot ourselves. Rather :c:func:`PyType_Ready` fills it for us by inheriting it from our base class, which is :class:`object` by default. Most types use the default allocation. .. note:: - If you are creating a co-operative :attr:`tp_new` (one that calls a base type's - :attr:`tp_new` or :meth:`__new__`), you must *not* try to determine what method + If you are creating a co-operative :c:member:`~PyTypeObject.tp_new` (one that calls a base type's + :c:member:`~PyTypeObject.tp_new` or :meth:`__new__`), you must *not* try to determine what method to call using method resolution order at runtime. Always statically determine - what type you are going to call, and call its :attr:`tp_new` directly, or via + what type you are going to call, and call its :c:member:`~PyTypeObject.tp_new` directly, or via ``type->tp_base->tp_new``. 
If you do not do this, Python subclasses of your type that also inherit from other Python-defined classes may not work correctly. (Specifically, you may not be able to create instances of such subclasses @@ -373,11 +373,11 @@ return 0; } -by filling the :attr:`tp_init` slot. :: +by filling the :c:member:`~PyTypeObject.tp_init` slot. :: (initproc)Noddy_init, /* tp_init */ -The :attr:`tp_init` slot is exposed in Python as the :meth:`__init__` method. It +The :c:member:`~PyTypeObject.tp_init` slot is exposed in Python as the :meth:`__init__` method. It is used to initialize an object after it's created. Unlike the new method, we can't guarantee that the initializer is called. The initializer isn't called when unpickling objects and it can be overridden. Our initializer accepts @@ -407,7 +407,7 @@ * when we know that deallocation of the object [#]_ will not cause any calls back into our type's code -* when decrementing a reference count in a :attr:`tp_dealloc` handler when +* when decrementing a reference count in a :c:member:`~PyTypeObject.tp_dealloc` handler when garbage-collections is not supported [#]_ We want to expose our instance variables as attributes. There are a @@ -423,7 +423,7 @@ {NULL} /* Sentinel */ }; -and put the definitions in the :attr:`tp_members` slot:: +and put the definitions in the :c:member:`~PyTypeObject.tp_members` slot:: Noddy_members, /* tp_members */ @@ -483,7 +483,7 @@ {NULL} /* Sentinel */ }; -and assign them to the :attr:`tp_methods` slot:: +and assign them to the :c:member:`~PyTypeObject.tp_methods` slot:: Noddy_methods, /* tp_methods */ @@ -578,7 +578,7 @@ {NULL} /* Sentinel */ }; -and register it in the :attr:`tp_getset` slot:: +and register it in the :c:member:`~PyTypeObject.tp_getset` slot:: Noddy_getseters, /* tp_getset */ @@ -595,7 +595,7 @@ {NULL} /* Sentinel */ }; -We also need to update the :attr:`tp_init` handler to only allow strings [#]_ to +We also need to update the :c:member:`~PyTypeObject.tp_init` handler to only allow strings [#]_ to be passed:: static int @@ -713,7 +713,7 @@ .. note:: - Note that the :attr:`tp_traverse` implementation must name its arguments exactly + Note that the :c:member:`~PyTypeObject.tp_traverse` implementation must name its arguments exactly *visit* and *arg* in order to use :c:func:`Py_VISIT`. This is to encourage uniformity across these boring implementations. @@ -750,7 +750,7 @@ reference count drops to zero, we might cause code to run that calls back into the object. In addition, because we now support garbage collection, we also have to worry about code being run that triggers garbage collection. If garbage -collection is run, our :attr:`tp_traverse` handler could get called. We can't +collection is run, our :c:member:`~PyTypeObject.tp_traverse` handler could get called. We can't take a chance of having :c:func:`Noddy_traverse` called when a member's reference count has dropped to zero and its value hasn't been set to *NULL*. @@ -770,8 +770,8 @@ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ -That's pretty much it. If we had written custom :attr:`tp_alloc` or -:attr:`tp_free` slots, we'd need to modify them for cyclic-garbage collection. +That's pretty much it. If we had written custom :c:member:`~PyTypeObject.tp_alloc` or +:c:member:`~PyTypeObject.tp_free` slots, we'd need to modify them for cyclic-garbage collection. Most extensions will use the versions automatically provided. 
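For the getset table mentioned above, here is a minimal sketch -- a hypothetical DemoObject with one string attribute, not the actual Noddy code -- of a getter/setter pair and the PyGetSetDef array they live in:

    #include <Python.h>

    /* A hypothetical object with one string-valued attribute. */
    typedef struct {
        PyObject_HEAD
        PyObject *first;
    } DemoObject;

    static PyObject *
    Demo_getfirst(DemoObject *self, void *closure)
    {
        if (self->first == NULL)
            Py_RETURN_NONE;
        Py_INCREF(self->first);
        return self->first;
    }

    static int
    Demo_setfirst(DemoObject *self, PyObject *value, void *closure)
    {
        PyObject *tmp;
        if (value == NULL) {
            PyErr_SetString(PyExc_TypeError, "cannot delete the first attribute");
            return -1;
        }
        if (!PyUnicode_Check(value)) {
            PyErr_SetString(PyExc_TypeError, "first must be a string");
            return -1;
        }
        /* Take the new reference before dropping the old one. */
        tmp = self->first;
        Py_INCREF(value);
        self->first = value;
        Py_XDECREF(tmp);
        return 0;
    }

    static PyGetSetDef Demo_getseters[] = {
        {"first", (getter)Demo_getfirst, (setter)Demo_setfirst,
         "first name", NULL},
        {NULL}                          /* sentinel */
    };

Assigning Demo_getseters to the type's tp_getset slot makes PyType_Ready install a descriptor for "first" that routes attribute access through these two functions; the closure pointer (NULL here) can be used to share one getter/setter implementation among several attributes.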
@@ -830,8 +830,8 @@ This pattern is important when writing a type with custom :attr:`new` and :attr:`dealloc` methods. The :attr:`new` method should not actually create the -memory for the object with :attr:`tp_alloc`, that will be handled by the base -class when calling its :attr:`tp_new`. +memory for the object with :c:member:`~PyTypeObject.tp_alloc`, that will be handled by the base +class when calling its :c:member:`~PyTypeObject.tp_new`. When filling out the :c:func:`PyTypeObject` for the :class:`Shoddy` type, you see a slot for :c:func:`tp_base`. Due to cross platform compiler issues, you can't @@ -857,8 +857,8 @@ } Before calling :c:func:`PyType_Ready`, the type structure must have the -:attr:`tp_base` slot filled in. When we are deriving a new type, it is not -necessary to fill out the :attr:`tp_alloc` slot with :c:func:`PyType_GenericNew` +:c:member:`~PyTypeObject.tp_base` slot filled in. When we are deriving a new type, it is not +necessary to fill out the :c:member:`~PyTypeObject.tp_alloc` slot with :c:func:`PyType_GenericNew` -- the allocate function from the base type will be inherited. After that, calling :c:func:`PyType_Ready` and adding the type object to the @@ -901,7 +901,7 @@ These fields tell the runtime how much memory to allocate when new objects of this type are created. Python has some built-in support for variable length -structures (think: strings, lists) which is where the :attr:`tp_itemsize` field +structures (think: strings, lists) which is where the :c:member:`~PyTypeObject.tp_itemsize` field comes in. This will be dealt with later. :: char *tp_doc; @@ -997,7 +997,7 @@ reprfunc tp_repr; reprfunc tp_str; -The :attr:`tp_repr` handler should return a string object containing a +The :c:member:`~PyTypeObject.tp_repr` handler should return a string object containing a representation of the instance for which it is called. Here is a simple example:: @@ -1008,15 +1008,15 @@ obj->obj_UnderlyingDatatypePtr->size); } -If no :attr:`tp_repr` handler is specified, the interpreter will supply a -representation that uses the type's :attr:`tp_name` and a uniquely-identifying +If no :c:member:`~PyTypeObject.tp_repr` handler is specified, the interpreter will supply a +representation that uses the type's :c:member:`~PyTypeObject.tp_name` and a uniquely-identifying value for the object. -The :attr:`tp_str` handler is to :func:`str` what the :attr:`tp_repr` handler +The :c:member:`~PyTypeObject.tp_str` handler is to :func:`str` what the :c:member:`~PyTypeObject.tp_repr` handler described above is to :func:`repr`; that is, it is called when Python code calls :func:`str` on an instance of your object. Its implementation is very similar -to the :attr:`tp_repr` function, but the resulting string is intended for human -consumption. If :attr:`tp_str` is not specified, the :attr:`tp_repr` handler is +to the :c:member:`~PyTypeObject.tp_repr` function, but the resulting string is intended for human +consumption. If :c:member:`~PyTypeObject.tp_str` is not specified, the :c:member:`~PyTypeObject.tp_repr` handler is used instead. Here is a simple example:: @@ -1081,7 +1081,7 @@ type object. Each descriptor controls access to one attribute of the instance object. 
Each of the tables is optional; if all three are *NULL*, instances of the type will only have attributes that are inherited from their base type, and -should leave the :attr:`tp_getattro` and :attr:`tp_setattro` fields *NULL* as +should leave the :c:member:`~PyTypeObject.tp_getattro` and :c:member:`~PyTypeObject.tp_setattro` fields *NULL* as well, allowing the base type to handle attributes. The tables are declared as three fields of the type object:: @@ -1090,7 +1090,7 @@ struct PyMemberDef *tp_members; struct PyGetSetDef *tp_getset; -If :attr:`tp_methods` is not *NULL*, it must refer to an array of +If :c:member:`~PyTypeObject.tp_methods` is not *NULL*, it must refer to an array of :c:type:`PyMethodDef` structures. Each entry in the table is an instance of this structure:: @@ -1146,13 +1146,13 @@ single: WRITE_RESTRICTED single: RESTRICTED -An interesting advantage of using the :attr:`tp_members` table to build +An interesting advantage of using the :c:member:`~PyTypeObject.tp_members` table to build descriptors that are used at runtime is that any attribute defined this way can have an associated doc string simply by providing the text in the table. An application can use the introspection API to retrieve the descriptor from the class object, and get the doc string using its :attr:`__doc__` attribute. -As with the :attr:`tp_methods` table, a sentinel entry with a :attr:`name` value +As with the :c:member:`~PyTypeObject.tp_methods` table, a sentinel entry with a :attr:`name` value of *NULL* is required. .. XXX Descriptors need to be explained in more detail somewhere, but not here. @@ -1176,7 +1176,7 @@ called, so that if you do need to extend their functionality, you'll understand what needs to be done. -The :attr:`tp_getattr` handler is called when the object requires an attribute +The :c:member:`~PyTypeObject.tp_getattr` handler is called when the object requires an attribute look-up. It is called in the same situations where the :meth:`__getattr__` method of a class would be called. @@ -1196,11 +1196,11 @@ return NULL; } -The :attr:`tp_setattr` handler is called when the :meth:`__setattr__` or +The :c:member:`~PyTypeObject.tp_setattr` handler is called when the :meth:`__setattr__` or :meth:`__delattr__` method of a class instance would be called. When an attribute should be deleted, the third parameter will be *NULL*. Here is an example that simply raises an exception; if this were really all you wanted, the -:attr:`tp_setattr` handler should be set to *NULL*. :: +:c:member:`~PyTypeObject.tp_setattr` handler should be set to *NULL*. :: static int newdatatype_setattr(newdatatypeobject *obj, char *name, PyObject *v) @@ -1216,7 +1216,7 @@ richcmpfunc tp_richcompare; -The :attr:`tp_richcompare` handler is called when comparisons are needed. It is +The :c:member:`~PyTypeObject.tp_richcompare` handler is called when comparisons are needed. It is analogous to the :ref:`rich comparison methods `, like :meth:`__lt__`, and also called by :c:func:`PyObject_RichCompare` and :c:func:`PyObject_RichCompareBool`. @@ -1307,7 +1307,7 @@ This function is called when an instance of your data type is "called", for example, if ``obj1`` is an instance of your data type and the Python script -contains ``obj1('hello')``, the :attr:`tp_call` handler is invoked. +contains ``obj1('hello')``, the :c:member:`~PyTypeObject.tp_call` handler is invoked. 
This function takes three arguments: @@ -1394,7 +1394,7 @@ For an object to be weakly referencable, the extension must include a :c:type:`PyObject\*` field in the instance structure for the use of the weak reference mechanism; it must be initialized to *NULL* by the object's -constructor. It must also set the :attr:`tp_weaklistoffset` field of the +constructor. It must also set the :c:member:`~PyTypeObject.tp_weaklistoffset` field of the corresponding type object to the offset of the field. For example, the instance type is defined with the following structure:: @@ -1480,7 +1480,7 @@ .. [#] This is true when we know that the object is a basic type, like a string or a float. -.. [#] We relied on this in the :attr:`tp_dealloc` handler in this example, because our +.. [#] We relied on this in the :c:member:`~PyTypeObject.tp_dealloc` handler in this example, because our type doesn't support garbage collection. Even if a type supports garbage collection, there are calls that can be made to "untrack" the object from garbage collection, however, these calls are advanced and not covered here. diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -121,8 +121,8 @@ Return a list of objects directly referred to by any of the arguments. The referents returned are those objects visited by the arguments' C-level - :attr:`tp_traverse` methods (if any), and may not be all objects actually - directly reachable. :attr:`tp_traverse` methods are supported only by objects + :c:member:`~PyTypeObject.tp_traverse` methods (if any), and may not be all objects actually + directly reachable. :c:member:`~PyTypeObject.tp_traverse` methods are supported only by objects that support garbage collection, and are only required to visit objects that may be involved in a cycle. So, for example, if an integer is directly reachable from an argument, that integer object may or may not appear in the result list. diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -751,7 +751,7 @@ iterators for those iteration types. (An example of an object supporting multiple forms of iteration would be a tree structure which supports both breadth-first and depth-first traversal.) This method corresponds to the - :attr:`tp_iter` slot of the type structure for Python objects in the Python/C + :c:member:`~PyTypeObject.tp_iter` slot of the type structure for Python objects in the Python/C API. The iterator objects themselves are required to support the following two @@ -762,7 +762,7 @@ Return the iterator object itself. This is required to allow both containers and iterators to be used with the :keyword:`for` and :keyword:`in` statements. - This method corresponds to the :attr:`tp_iter` slot of the type structure for + This method corresponds to the :c:member:`~PyTypeObject.tp_iter` slot of the type structure for Python objects in the Python/C API. @@ -770,7 +770,7 @@ Return the next item from the container. If there are no further items, raise the :exc:`StopIteration` exception. This method corresponds to the - :attr:`tp_iternext` slot of the type structure for Python objects in the + :c:member:`~PyTypeObject.tp_iternext` slot of the type structure for Python objects in the Python/C API. 
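As a rough illustration of how those two slots fit together in C, a minimal counting iterator could look like the sketch below; the ``CountIterObject`` type and its fields are assumptions for illustration, not part of the documentation being patched::

    #include <Python.h>

    /* Hypothetical iterator that yields 0, 1, ..., limit-1. */
    typedef struct {
        PyObject_HEAD
        Py_ssize_t next_value;
        Py_ssize_t limit;
    } CountIterObject;

    /* tp_iter: an iterator simply returns itself. */
    static PyObject *
    CountIter_iter(PyObject *self)
    {
        Py_INCREF(self);
        return self;
    }

    /* tp_iternext: return the next item, or NULL with no exception set
       to signal normal exhaustion (the equivalent of StopIteration). */
    static PyObject *
    CountIter_iternext(PyObject *self)
    {
        CountIterObject *it = (CountIterObject *)self;
        if (it->next_value >= it->limit)
            return NULL;
        return PyLong_FromSsize_t(it->next_value++);
    }

``CountIter_iter`` would be installed in the type's ``tp_iter`` slot and ``CountIter_iternext`` in its ``tp_iternext`` slot.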
Python defines several iterator objects to support iteration over general and diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -450,9 +450,9 @@ Python classes can define an :meth:`__iter__` method, which should create and return a new iterator for the object; if the object is its own iterator, this method can just return ``self``. In particular, iterators will usually be their -own iterators. Extension types implemented in C can implement a :attr:`tp_iter` +own iterators. Extension types implemented in C can implement a :c:member:`~PyTypeObject.tp_iter` function in order to return an iterator, and extension types that want to behave -as iterators can define a :attr:`tp_iternext` function. +as iterators can define a :c:member:`~PyTypeObject.tp_iternext` function. So, after all this, what do iterators actually do? They have one required method, :meth:`next`, which takes no arguments and returns the next value. When @@ -478,7 +478,7 @@ In 2.2, Python's :keyword:`for` statement no longer expects a sequence; it expects something for which :func:`iter` will return an iterator. For backward compatibility and convenience, an iterator is automatically constructed for -sequences that don't implement :meth:`__iter__` or a :attr:`tp_iter` slot, so +sequences that don't implement :meth:`__iter__` or a :c:member:`~PyTypeObject.tp_iter` slot, so ``for i in [1,2,3]`` will still work. Wherever the Python interpreter loops over a sequence, it's been changed to use the iterator protocol. This means you can do things like this:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 21:15:00 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 21:15:00 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318589=3A_fix_hyperlinking_of_type_slots_=28tp?= =?utf-8?b?Xyop?= Message-ID: <3c5h5w64bqz7Ljv@mail.python.org> http://hg.python.org/cpython/rev/36ff479e429c changeset: 84956:36ff479e429c parent: 84954:1c9aa4f68f2b parent: 84955:a381721299a3 user: Antoine Pitrou date: Thu Aug 01 21:14:43 2013 +0200 summary: Issue #18589: fix hyperlinking of type slots (tp_*) files: Doc/c-api/allocation.rst | 6 +- Doc/c-api/exceptions.rst | 12 +- Doc/c-api/gcsupport.rst | 28 +- Doc/c-api/type.rst | 10 +- Doc/c-api/typeobj.rst | 224 ++++++++++++------------ Doc/extending/newtypes.rst | 106 +++++----- Doc/library/gc.rst | 4 +- Doc/library/stdtypes.rst | 6 +- Doc/whatsnew/2.2.rst | 6 +- 9 files changed, 201 insertions(+), 201 deletions(-) diff --git a/Doc/c-api/allocation.rst b/Doc/c-api/allocation.rst --- a/Doc/c-api/allocation.rst +++ b/Doc/c-api/allocation.rst @@ -32,7 +32,7 @@ Allocate a new Python object using the C structure type *TYPE* and the Python type object *type*. Fields not defined by the Python object header are not initialized; the object's reference count will be one. The size of - the memory allocation is determined from the :attr:`tp_basicsize` field of + the memory allocation is determined from the :c:member:`~PyTypeObject.tp_basicsize` field of the type object. @@ -41,7 +41,7 @@ Allocate a new Python object using the C structure type *TYPE* and the Python type object *type*. Fields not defined by the Python object header are not initialized. 
The allocated memory allows for the *TYPE* structure - plus *size* fields of the size given by the :attr:`tp_itemsize` field of + plus *size* fields of the size given by the :c:member:`~PyTypeObject.tp_itemsize` field of *type*. This is useful for implementing objects like tuples, which are able to determine their size at construction time. Embedding the array of fields into the same allocation decreases the number of allocations, @@ -52,7 +52,7 @@ Releases memory allocated to an object using :c:func:`PyObject_New` or :c:func:`PyObject_NewVar`. This is normally called from the - :attr:`tp_dealloc` handler specified in the object's type. The fields of + :c:member:`~PyTypeObject.tp_dealloc` handler specified in the object's type. The fields of the object should not be accessed after this call as the memory is no longer a valid Python object. diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -607,28 +607,28 @@ Ends a :c:func:`Py_EnterRecursiveCall`. Must be called once for each *successful* invocation of :c:func:`Py_EnterRecursiveCall`. -Properly implementing :attr:`tp_repr` for container types requires +Properly implementing :c:member:`~PyTypeObject.tp_repr` for container types requires special recursion handling. In addition to protecting the stack, -:attr:`tp_repr` also needs to track objects to prevent cycles. The +:c:member:`~PyTypeObject.tp_repr` also needs to track objects to prevent cycles. The following two functions facilitate this functionality. Effectively, these are the C equivalent to :func:`reprlib.recursive_repr`. .. c:function:: int Py_ReprEnter(PyObject *object) - Called at the beginning of the :attr:`tp_repr` implementation to + Called at the beginning of the :c:member:`~PyTypeObject.tp_repr` implementation to detect cycles. If the object has already been processed, the function returns a - positive integer. In that case the :attr:`tp_repr` implementation + positive integer. In that case the :c:member:`~PyTypeObject.tp_repr` implementation should return a string object indicating a cycle. As examples, :class:`dict` objects return ``{...}`` and :class:`list` objects return ``[...]``. The function will return a negative integer if the recursion limit - is reached. In that case the :attr:`tp_repr` implementation should + is reached. In that case the :c:member:`~PyTypeObject.tp_repr` implementation should typically return ``NULL``. - Otherwise, the function returns zero and the :attr:`tp_repr` + Otherwise, the function returns zero and the :c:member:`~PyTypeObject.tp_repr` implementation can continue normally. .. c:function:: void Py_ReprLeave(PyObject *object) diff --git a/Doc/c-api/gcsupport.rst b/Doc/c-api/gcsupport.rst --- a/Doc/c-api/gcsupport.rst +++ b/Doc/c-api/gcsupport.rst @@ -12,10 +12,10 @@ or strings), do not need to provide any explicit support for garbage collection. -To create a container type, the :attr:`tp_flags` field of the type object must +To create a container type, the :c:member:`~PyTypeObject.tp_flags` field of the type object must include the :const:`Py_TPFLAGS_HAVE_GC` and provide an implementation of the -:attr:`tp_traverse` handler. If instances of the type are mutable, a -:attr:`tp_clear` implementation must also be provided. +:c:member:`~PyTypeObject.tp_traverse` handler. If instances of the type are mutable, a +:c:member:`~PyTypeObject.tp_clear` implementation must also be provided. .. 
data:: Py_TPFLAGS_HAVE_GC @@ -57,7 +57,7 @@ Adds the object *op* to the set of container objects tracked by the collector. The collector can run at unexpected times so objects must be valid while being tracked. This should be called once all the fields - followed by the :attr:`tp_traverse` handler become valid, usually near the + followed by the :c:member:`~PyTypeObject.tp_traverse` handler become valid, usually near the end of the constructor. @@ -86,8 +86,8 @@ Remove the object *op* from the set of container objects tracked by the collector. Note that :c:func:`PyObject_GC_Track` can be called again on this object to add it back to the set of tracked objects. The deallocator - (:attr:`tp_dealloc` handler) should call this for the object before any of - the fields used by the :attr:`tp_traverse` handler become invalid. + (:c:member:`~PyTypeObject.tp_dealloc` handler) should call this for the object before any of + the fields used by the :c:member:`~PyTypeObject.tp_traverse` handler become invalid. .. c:function:: void _PyObject_GC_UNTRACK(PyObject *op) @@ -95,19 +95,19 @@ A macro version of :c:func:`PyObject_GC_UnTrack`. It should not be used for extension modules. -The :attr:`tp_traverse` handler accepts a function parameter of this type: +The :c:member:`~PyTypeObject.tp_traverse` handler accepts a function parameter of this type: .. c:type:: int (*visitproc)(PyObject *object, void *arg) - Type of the visitor function passed to the :attr:`tp_traverse` handler. + Type of the visitor function passed to the :c:member:`~PyTypeObject.tp_traverse` handler. The function should be called with an object to traverse as *object* and - the third parameter to the :attr:`tp_traverse` handler as *arg*. The + the third parameter to the :c:member:`~PyTypeObject.tp_traverse` handler as *arg*. The Python core uses several visitor functions to implement cyclic garbage detection; it's not expected that users will need to write their own visitor functions. -The :attr:`tp_traverse` handler must have the following type: +The :c:member:`~PyTypeObject.tp_traverse` handler must have the following type: .. c:type:: int (*traverseproc)(PyObject *self, visitproc visit, void *arg) @@ -119,15 +119,15 @@ object argument. If *visit* returns a non-zero value that value should be returned immediately. -To simplify writing :attr:`tp_traverse` handlers, a :c:func:`Py_VISIT` macro is -provided. In order to use this macro, the :attr:`tp_traverse` implementation +To simplify writing :c:member:`~PyTypeObject.tp_traverse` handlers, a :c:func:`Py_VISIT` macro is +provided. In order to use this macro, the :c:member:`~PyTypeObject.tp_traverse` implementation must name its arguments exactly *visit* and *arg*: .. c:function:: void Py_VISIT(PyObject *o) Call the *visit* callback, with arguments *o* and *arg*. If *visit* returns - a non-zero value, then return it. Using this macro, :attr:`tp_traverse` + a non-zero value, then return it. Using this macro, :c:member:`~PyTypeObject.tp_traverse` handlers look like:: static int @@ -138,7 +138,7 @@ return 0; } -The :attr:`tp_clear` handler must be of the :c:type:`inquiry` type, or *NULL* +The :c:member:`~PyTypeObject.tp_clear` handler must be of the :c:type:`inquiry` type, or *NULL* if the object is immutable. diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -37,10 +37,10 @@ .. c:function:: long PyType_GetFlags(PyTypeObject* type) - Return the :attr:`tp_flags` member of *type*. 
This function is primarily + Return the :c:member:`~PyTypeObject.tp_flags` member of *type*. This function is primarily meant for use with `Py_LIMITED_API`; the individual flag bits are guaranteed to be stable across Python releases, but access to - :attr:`tp_flags` itself is not part of the limited API. + :c:member:`~PyTypeObject.tp_flags` itself is not part of the limited API. .. versionadded:: 3.2 @@ -70,14 +70,14 @@ .. c:function:: PyObject* PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems) - Generic handler for the :attr:`tp_alloc` slot of a type object. Use + Generic handler for the :c:member:`~PyTypeObject.tp_alloc` slot of a type object. Use Python's default memory allocation mechanism to allocate a new instance and initialize all its contents to *NULL*. .. c:function:: PyObject* PyType_GenericNew(PyTypeObject *type, PyObject *args, PyObject *kwds) - Generic handler for the :attr:`tp_new` slot of a type object. Create a - new instance using the type's :attr:`tp_alloc` slot. + Generic handler for the :c:member:`~PyTypeObject.tp_new` slot of a type object. Create a + new instance using the type's :c:member:`~PyTypeObject.tp_alloc` slot. .. c:function:: int PyType_Ready(PyTypeObject *type) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -35,7 +35,7 @@ The type object structure extends the :c:type:`PyVarObject` structure. The :attr:`ob_size` field is used for dynamic types (created by :func:`type_new`, usually called from a class statement). Note that :c:data:`PyType_Type` (the -metatype) initializes :attr:`tp_itemsize`, which means that its instances (i.e. +metatype) initializes :c:member:`~PyTypeObject.tp_itemsize`, which means that its instances (i.e. type objects) *must* have the :attr:`ob_size` field. @@ -102,7 +102,7 @@ should be just the type name. If the module is a submodule of a package, the full package name is part of the full module name. For example, a type named :class:`T` defined in module :mod:`M` in subpackage :mod:`Q` in package :mod:`P` - should have the :attr:`tp_name` initializer ``"P.Q.M.T"``. + should have the :c:member:`~PyTypeObject.tp_name` initializer ``"P.Q.M.T"``. For dynamically allocated type objects, this should just be the type name, and the module name explicitly stored in the type dict as the value for key @@ -113,7 +113,7 @@ attribute, and everything after the last dot is made accessible as the :attr:`__name__` attribute. - If no dot is present, the entire :attr:`tp_name` field is made accessible as the + If no dot is present, the entire :c:member:`~PyTypeObject.tp_name` field is made accessible as the :attr:`__name__` attribute, and the :attr:`__module__` attribute is undefined (unless explicitly set in the dictionary, as explained above). This means your type will be impossible to pickle. @@ -127,13 +127,13 @@ These fields allow calculating the size in bytes of instances of the type. There are two kinds of types: types with fixed-length instances have a zero - :attr:`tp_itemsize` field, types with variable-length instances have a non-zero - :attr:`tp_itemsize` field. For a type with fixed-length instances, all - instances have the same size, given in :attr:`tp_basicsize`. + :c:member:`~PyTypeObject.tp_itemsize` field, types with variable-length instances have a non-zero + :c:member:`~PyTypeObject.tp_itemsize` field. For a type with fixed-length instances, all + instances have the same size, given in :c:member:`~PyTypeObject.tp_basicsize`. 
For a type with variable-length instances, the instances must have an - :attr:`ob_size` field, and the instance size is :attr:`tp_basicsize` plus N - times :attr:`tp_itemsize`, where N is the "length" of the object. The value of + :attr:`ob_size` field, and the instance size is :c:member:`~PyTypeObject.tp_basicsize` plus N + times :c:member:`~PyTypeObject.tp_itemsize`, where N is the "length" of the object. The value of N is typically stored in the instance's :attr:`ob_size` field. There are exceptions: for example, ints use a negative :attr:`ob_size` to indicate a negative number, and N is ``abs(ob_size)`` there. Also, the presence of an @@ -146,20 +146,20 @@ :c:macro:`PyObject_HEAD` or :c:macro:`PyObject_VAR_HEAD` (whichever is used to declare the instance struct) and this in turn includes the :attr:`_ob_prev` and :attr:`_ob_next` fields if they are present. This means that the only correct - way to get an initializer for the :attr:`tp_basicsize` is to use the + way to get an initializer for the :c:member:`~PyTypeObject.tp_basicsize` is to use the ``sizeof`` operator on the struct used to declare the instance layout. The basic size does not include the GC header size. These fields are inherited separately by subtypes. If the base type has a - non-zero :attr:`tp_itemsize`, it is generally not safe to set - :attr:`tp_itemsize` to a different non-zero value in a subtype (though this + non-zero :c:member:`~PyTypeObject.tp_itemsize`, it is generally not safe to set + :c:member:`~PyTypeObject.tp_itemsize` to a different non-zero value in a subtype (though this depends on the implementation of the base type). A note about alignment: if the variable items require a particular alignment, - this should be taken care of by the value of :attr:`tp_basicsize`. Example: - suppose a type implements an array of ``double``. :attr:`tp_itemsize` is + this should be taken care of by the value of :c:member:`~PyTypeObject.tp_basicsize`. Example: + suppose a type implements an array of ``double``. :c:member:`~PyTypeObject.tp_itemsize` is ``sizeof(double)``. It is the programmer's responsibility that - :attr:`tp_basicsize` is a multiple of ``sizeof(double)`` (assuming this is the + :c:member:`~PyTypeObject.tp_basicsize` is a multiple of ``sizeof(double)`` (assuming this is the alignment requirement for ``double``). @@ -175,10 +175,10 @@ destructor function should free all references which the instance owns, free all memory buffers owned by the instance (using the freeing function corresponding to the allocation function used to allocate the buffer), and finally (as its - last action) call the type's :attr:`tp_free` function. If the type is not + last action) call the type's :c:member:`~PyTypeObject.tp_free` function. If the type is not subtypable (doesn't have the :const:`Py_TPFLAGS_BASETYPE` flag bit set), it is permissible to call the object deallocator directly instead of via - :attr:`tp_free`. The object deallocator should be the one used to allocate the + :c:member:`~PyTypeObject.tp_free`. 
The object deallocator should be the one used to allocate the instance; this is normally :c:func:`PyObject_Del` if the instance was allocated using :c:func:`PyObject_New` or :c:func:`PyObject_VarNew`, or :c:func:`PyObject_GC_Del` if the instance was allocated using @@ -193,25 +193,25 @@ The print function is only called when the instance is printed to a *real* file; when it is printed to a pseudo-file (like a :class:`StringIO` instance), the - instance's :attr:`tp_repr` or :attr:`tp_str` function is called to convert it to - a string. These are also called when the type's :attr:`tp_print` field is - *NULL*. A type should never implement :attr:`tp_print` in a way that produces - different output than :attr:`tp_repr` or :attr:`tp_str` would. + instance's :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` function is called to convert it to + a string. These are also called when the type's :c:member:`~PyTypeObject.tp_print` field is + *NULL*. A type should never implement :c:member:`~PyTypeObject.tp_print` in a way that produces + different output than :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` would. The print function is called with the same signature as :c:func:`PyObject_Print`: ``int tp_print(PyObject *self, FILE *file, int flags)``. The *self* argument is the instance to be printed. The *file* argument is the stdio file to which it is to be printed. The *flags* argument is composed of flag bits. The only flag bit currently defined is :const:`Py_PRINT_RAW`. When the :const:`Py_PRINT_RAW` - flag bit is set, the instance should be printed the same way as :attr:`tp_str` + flag bit is set, the instance should be printed the same way as :c:member:`~PyTypeObject.tp_str` would format it; when the :const:`Py_PRINT_RAW` flag bit is clear, the instance - should be printed the same was as :attr:`tp_repr` would format it. It should + should be printed the same way as :c:member:`~PyTypeObject.tp_repr` would format it. It should return ``-1`` and set an exception condition when an error occurred during the comparison. - It is possible that the :attr:`tp_print` field will be deprecated. In any case, - it is recommended not to define :attr:`tp_print`, but instead to rely on - :attr:`tp_repr` and :attr:`tp_str` for printing. + It is possible that the :c:member:`~PyTypeObject.tp_print` field will be deprecated. In any case, + it is recommended not to define :c:member:`~PyTypeObject.tp_print`, but instead to rely on + :c:member:`~PyTypeObject.tp_repr` and :c:member:`~PyTypeObject.tp_str` for printing. This field is inherited by subtypes. @@ -221,13 +221,13 @@ An optional pointer to the get-attribute-string function. This field is deprecated. When it is defined, it should point to a function - that acts the same as the :attr:`tp_getattro` function, but taking a C string + that acts the same as the :c:member:`~PyTypeObject.tp_getattro` function, but taking a C string instead of a Python string object to give the attribute name. The signature is the same as for :c:func:`PyObject_GetAttrString`. - This field is inherited by subtypes together with :attr:`tp_getattro`: a subtype - inherits both :attr:`tp_getattr` and :attr:`tp_getattro` from its base type when - the subtype's :attr:`tp_getattr` and :attr:`tp_getattro` are both *NULL*.
+ This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_getattro`: a subtype + inherits both :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` are both *NULL*. .. c:member:: setattrfunc PyTypeObject.tp_setattr @@ -235,13 +235,13 @@ An optional pointer to the set-attribute-string function. This field is deprecated. When it is defined, it should point to a function - that acts the same as the :attr:`tp_setattro` function, but taking a C string + that acts the same as the :c:member:`~PyTypeObject.tp_setattro` function, but taking a C string instead of a Python string object to give the attribute name. The signature is the same as for :c:func:`PyObject_SetAttrString`. - This field is inherited by subtypes together with :attr:`tp_setattro`: a subtype - inherits both :attr:`tp_setattr` and :attr:`tp_setattro` from its base type when - the subtype's :attr:`tp_setattr` and :attr:`tp_setattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_setattro`: a subtype + inherits both :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` are both *NULL*. .. c:member:: void* PyTypeObject.tp_reserved @@ -275,7 +275,7 @@ objects which implement the number protocol. These fields are documented in :ref:`number-structs`. - The :attr:`tp_as_number` field is not inherited, but the contained fields are + The :c:member:`~PyTypeObject.tp_as_number` field is not inherited, but the contained fields are inherited individually. @@ -285,7 +285,7 @@ objects which implement the sequence protocol. These fields are documented in :ref:`sequence-structs`. - The :attr:`tp_as_sequence` field is not inherited, but the contained fields + The :c:member:`~PyTypeObject.tp_as_sequence` field is not inherited, but the contained fields are inherited individually. @@ -295,7 +295,7 @@ objects which implement the mapping protocol. These fields are documented in :ref:`mapping-structs`. - The :attr:`tp_as_mapping` field is not inherited, but the contained fields + The :c:member:`~PyTypeObject.tp_as_mapping` field is not inherited, but the contained fields are inherited individually. @@ -323,9 +323,9 @@ object raises :exc:`TypeError`. This field is inherited by subtypes together with - :attr:`tp_richcompare`: a subtype inherits both of - :attr:`tp_richcompare` and :attr:`tp_hash`, when the subtype's - :attr:`tp_richcompare` and :attr:`tp_hash` are both *NULL*. + :c:member:`~PyTypeObject.tp_richcompare`: a subtype inherits both of + :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash`, when the subtype's + :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` are both *NULL*. .. c:member:: ternaryfunc PyTypeObject.tp_call @@ -363,9 +363,9 @@ convenient to set this field to :c:func:`PyObject_GenericGetAttr`, which implements the normal way of looking for object attributes. - This field is inherited by subtypes together with :attr:`tp_getattr`: a subtype - inherits both :attr:`tp_getattr` and :attr:`tp_getattro` from its base type when - the subtype's :attr:`tp_getattr` and :attr:`tp_getattro` are both *NULL*. 
+ This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_getattr`: a subtype + inherits both :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` are both *NULL*. .. c:member:: setattrofunc PyTypeObject.tp_setattro @@ -376,9 +376,9 @@ convenient to set this field to :c:func:`PyObject_GenericSetAttr`, which implements the normal way of setting object attributes. - This field is inherited by subtypes together with :attr:`tp_setattr`: a subtype - inherits both :attr:`tp_setattr` and :attr:`tp_setattro` from its base type when - the subtype's :attr:`tp_setattr` and :attr:`tp_setattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_setattr`: a subtype + inherits both :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` are both *NULL*. .. c:member:: PyBufferProcs* PyTypeObject.tp_as_buffer @@ -387,7 +387,7 @@ which implement the buffer interface. These fields are documented in :ref:`buffer-structs`. - The :attr:`tp_as_buffer` field is not inherited, but the contained fields are + The :c:member:`~PyTypeObject.tp_as_buffer` field is not inherited, but the contained fields are inherited individually. @@ -396,8 +396,8 @@ This field is a bit mask of various flags. Some flags indicate variant semantics for certain situations; others are used to indicate that certain fields in the type object (or in the extension structures referenced via - :attr:`tp_as_number`, :attr:`tp_as_sequence`, :attr:`tp_as_mapping`, and - :attr:`tp_as_buffer`) that were historically not always present are valid; if + :c:member:`~PyTypeObject.tp_as_number`, :c:member:`~PyTypeObject.tp_as_sequence`, :c:member:`~PyTypeObject.tp_as_mapping`, and + :c:member:`~PyTypeObject.tp_as_buffer`) that were historically not always present are valid; if such a flag bit is clear, the type fields it guards must not be accessed and must be considered to have a zero or *NULL* value instead. @@ -407,13 +407,13 @@ inherited if the extension structure is inherited, i.e. the base type's value of the flag bit is copied into the subtype together with a pointer to the extension structure. The :const:`Py_TPFLAGS_HAVE_GC` flag bit is inherited together with - the :attr:`tp_traverse` and :attr:`tp_clear` fields, i.e. if the + the :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields, i.e. if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is clear in the subtype and the - :attr:`tp_traverse` and :attr:`tp_clear` fields in the subtype exist and have + :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields in the subtype exist and have *NULL* values. The following bit masks are currently defined; these can be ORed together using - the ``|`` operator to form the value of the :attr:`tp_flags` field. The macro + the ``|`` operator to form the value of the :c:member:`~PyTypeObject.tp_flags` field. The macro :c:func:`PyType_HasFeature` takes a type and a flags value, *tp* and *f*, and checks whether ``tp->tp_flags & f`` is non-zero. @@ -453,7 +453,7 @@ is set, instances must be created using :c:func:`PyObject_GC_New` and destroyed using :c:func:`PyObject_GC_Del`. More information in section :ref:`supporting-cycle-detection`. 
This bit also implies that the - GC-related fields :attr:`tp_traverse` and :attr:`tp_clear` are present in + GC-related fields :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` are present in the type object. @@ -467,7 +467,7 @@ .. data:: Py_TPFLAGS_HAVE_FINALIZE - This bit is set when the :attr:`tp_finalize` slot is present in the + This bit is set when the :c:member:`~PyTypeObject.tp_finalize` slot is present in the type structure. .. versionadded:: 3.4 @@ -489,8 +489,8 @@ about Python's garbage collection scheme can be found in section :ref:`supporting-cycle-detection`. - The :attr:`tp_traverse` pointer is used by the garbage collector to detect - reference cycles. A typical implementation of a :attr:`tp_traverse` function + The :c:member:`~PyTypeObject.tp_traverse` pointer is used by the garbage collector to detect + reference cycles. A typical implementation of a :c:member:`~PyTypeObject.tp_traverse` function simply calls :c:func:`Py_VISIT` on each of the instance's members that are Python objects. For example, this is function :c:func:`local_traverse` from the :mod:`_thread` extension module:: @@ -516,9 +516,9 @@ :c:func:`local_traverse` to have these specific names; don't name them just anything. - This field is inherited by subtypes together with :attr:`tp_clear` and the - :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :attr:`tp_traverse`, and - :attr:`tp_clear` are all inherited from the base type if they are all zero in + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_clear` and the + :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and + :c:member:`~PyTypeObject.tp_clear` are all inherited from the base type if they are all zero in the subtype. @@ -527,17 +527,17 @@ An optional pointer to a clear function for the garbage collector. This is only used if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is set. - The :attr:`tp_clear` member function is used to break reference cycles in cyclic - garbage detected by the garbage collector. Taken together, all :attr:`tp_clear` + The :c:member:`~PyTypeObject.tp_clear` member function is used to break reference cycles in cyclic + garbage detected by the garbage collector. Taken together, all :c:member:`~PyTypeObject.tp_clear` functions in the system must combine to break all reference cycles. This is - subtle, and if in any doubt supply a :attr:`tp_clear` function. For example, - the tuple type does not implement a :attr:`tp_clear` function, because it's + subtle, and if in any doubt supply a :c:member:`~PyTypeObject.tp_clear` function. For example, + the tuple type does not implement a :c:member:`~PyTypeObject.tp_clear` function, because it's possible to prove that no reference cycle can be composed entirely of tuples. - Therefore the :attr:`tp_clear` functions of other types must be sufficient to + Therefore the :c:member:`~PyTypeObject.tp_clear` functions of other types must be sufficient to break any cycle containing a tuple. This isn't immediately obvious, and there's - rarely a good reason to avoid implementing :attr:`tp_clear`. + rarely a good reason to avoid implementing :c:member:`~PyTypeObject.tp_clear`. 
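For illustration, a sketch of what a typical ``tp_clear`` implementation looks like for a hypothetical container holding two references; the ``PairObject`` struct is an assumption, not code from the patch::

    #include <Python.h>

    /* Hypothetical container whose members may participate in cycles. */
    typedef struct {
        PyObject_HEAD
        PyObject *first;
        PyObject *second;
    } PairObject;

    /* tp_clear: drop references that could be part of a reference cycle.
       Py_CLEAR nulls the pointer before decrefing, so the object is never
       left pointing at a member that is being torn down. */
    static int
    Pair_clear(PairObject *self)
    {
        Py_CLEAR(self->first);
        Py_CLEAR(self->second);
        return 0;
    }

A matching ``tp_dealloc`` can call ``PyObject_GC_UnTrack`` and then ``Pair_clear`` before freeing the instance, which is the pattern the surrounding text recommends.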
- Implementations of :attr:`tp_clear` should drop the instance's references to + Implementations of :c:member:`~PyTypeObject.tp_clear` should drop the instance's references to those of its members that may be Python objects, and set its pointers to those members to *NULL*, as in the following example:: @@ -562,18 +562,18 @@ so that *self* knows the contained object can no longer be used. The :c:func:`Py_CLEAR` macro performs the operations in a safe order. - Because the goal of :attr:`tp_clear` functions is to break reference cycles, + Because the goal of :c:member:`~PyTypeObject.tp_clear` functions is to break reference cycles, it's not necessary to clear contained objects like Python strings or Python integers, which can't participate in reference cycles. On the other hand, it may be convenient to clear all contained Python objects, and write the type's - :attr:`tp_dealloc` function to invoke :attr:`tp_clear`. + :c:member:`~PyTypeObject.tp_dealloc` function to invoke :c:member:`~PyTypeObject.tp_clear`. More information about Python's garbage collection scheme can be found in section :ref:`supporting-cycle-detection`. - This field is inherited by subtypes together with :attr:`tp_traverse` and the - :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :attr:`tp_traverse`, and - :attr:`tp_clear` are all inherited from the base type if they are all zero in + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_traverse` and the + :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and + :c:member:`~PyTypeObject.tp_clear` are all inherited from the base type if they are all zero in the subtype. @@ -593,13 +593,13 @@ comparisons makes sense (e.g. ``==`` and ``!=``, but not ``<`` and friends), directly raise :exc:`TypeError` in the rich comparison function. - This field is inherited by subtypes together with :attr:`tp_hash`: - a subtype inherits :attr:`tp_richcompare` and :attr:`tp_hash` when - the subtype's :attr:`tp_richcompare` and :attr:`tp_hash` are both + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_hash`: + a subtype inherits :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` when + the subtype's :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` are both *NULL*. The following constants are defined to be used as the third argument for - :attr:`tp_richcompare` and for :c:func:`PyObject_RichCompare`: + :c:member:`~PyTypeObject.tp_richcompare` and for :c:func:`PyObject_RichCompare`: +----------------+------------+ | Constant | Comparison | @@ -627,26 +627,26 @@ instance structure needs to include a field of type :c:type:`PyObject\*` which is initialized to *NULL*. - Do not confuse this field with :attr:`tp_weaklist`; that is the list head for + Do not confuse this field with :c:member:`~PyTypeObject.tp_weaklist`; that is the list head for weak references to the type object itself. This field is inherited by subtypes, but see the rules listed below. A subtype may override this offset; this means that the subtype uses a different weak reference list head than the base type. Since the list head is always found via - :attr:`tp_weaklistoffset`, this should not be a problem. + :c:member:`~PyTypeObject.tp_weaklistoffset`, this should not be a problem. 
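A minimal sketch of how a type can expose the weak reference list head described above; the ``TrivialObject`` layout and the use of C99 designated initializers are assumptions for illustration::

    #include <stddef.h>            /* offsetof */
    #include <Python.h>

    /* Hypothetical instance layout; the constructor must leave
       weakreflist set to NULL. */
    typedef struct {
        PyObject_HEAD
        PyObject *weakreflist;
    } TrivialObject;

    static PyTypeObject Trivial_Type = {
        PyVarObject_HEAD_INIT(NULL, 0)
        .tp_name = "example.Trivial",
        .tp_basicsize = sizeof(TrivialObject),
        .tp_flags = Py_TPFLAGS_DEFAULT,
        /* This offset is what makes instances weakly referenceable. */
        .tp_weaklistoffset = offsetof(TrivialObject, weakreflist),
    };

The deallocator of such a type should call ``PyObject_ClearWeakRefs((PyObject *)self)`` when ``weakreflist`` is non-*NULL* before releasing the instance.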
When a type defined by a class statement has no :attr:`__slots__` declaration, and none of its base types are weakly referenceable, the type is made weakly referenceable by adding a weak reference list head slot to the instance layout - and setting the :attr:`tp_weaklistoffset` of that slot's offset. + and setting the :c:member:`~PyTypeObject.tp_weaklistoffset` of that slot's offset. When a type's :attr:`__slots__` declaration contains a slot named :attr:`__weakref__`, that slot becomes the weak reference list head for instances of the type, and the slot's offset is stored in the type's - :attr:`tp_weaklistoffset`. + :c:member:`~PyTypeObject.tp_weaklistoffset`. When a type's :attr:`__slots__` declaration does not contain a slot named - :attr:`__weakref__`, the type inherits its :attr:`tp_weaklistoffset` from its + :attr:`__weakref__`, the type inherits its :c:member:`~PyTypeObject.tp_weaklistoffset` from its base type. .. c:member:: getiterfunc PyTypeObject.tp_iter @@ -668,7 +668,7 @@ *NULL* too. Its presence signals that the instances of this type are iterators. - Iterator types should also define the :attr:`tp_iter` function, and that + Iterator types should also define the :c:member:`~PyTypeObject.tp_iter` function, and that function should return the iterator instance itself (not a new iterator instance). @@ -683,7 +683,7 @@ structures, declaring regular methods of this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a method descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a method descriptor. This field is not inherited by subtypes (methods are inherited through a different mechanism). @@ -696,7 +696,7 @@ this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a member descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a member descriptor. This field is not inherited by subtypes (members are inherited through a different mechanism). @@ -708,7 +708,7 @@ structures, declaring computed attributes of instances of this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a getset descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a getset descriptor. This field is not inherited by subtypes (computed attributes are inherited through a different mechanism). @@ -756,7 +756,7 @@ .. warning:: It is not safe to use :c:func:`PyDict_SetItem` on or otherwise modify - :attr:`tp_dict` with the dictionary C-API. + :c:member:`~PyTypeObject.tp_dict` with the dictionary C-API. .. c:member:: descrgetfunc PyTypeObject.tp_descr_get @@ -792,7 +792,7 @@ the instance variable dictionary; this offset is used by :c:func:`PyObject_GenericGetAttr`. - Do not confuse this field with :attr:`tp_dict`; that is the dictionary for + Do not confuse this field with :c:member:`~PyTypeObject.tp_dict`; that is the dictionary for attributes of the type object itself. If the value of this field is greater than zero, it specifies the offset from @@ -801,20 +801,20 @@ offset is more expensive to use, and should only be used when the instance structure contains a variable-length part. This is used for example to add an instance variable dictionary to subtypes of :class:`str` or :class:`tuple`. 
Note - that the :attr:`tp_basicsize` field should account for the dictionary added to + that the :c:member:`~PyTypeObject.tp_basicsize` field should account for the dictionary added to the end in that case, even though the dictionary is not included in the basic object layout. On a system with a pointer size of 4 bytes, - :attr:`tp_dictoffset` should be set to ``-4`` to indicate that the dictionary is + :c:member:`~PyTypeObject.tp_dictoffset` should be set to ``-4`` to indicate that the dictionary is at the very end of the structure. The real dictionary offset in an instance can be computed from a negative - :attr:`tp_dictoffset` as follows:: + :c:member:`~PyTypeObject.tp_dictoffset` as follows:: dictoffset = tp_basicsize + abs(ob_size)*tp_itemsize + tp_dictoffset if dictoffset is not aligned on sizeof(void*): round up to sizeof(void*) - where :attr:`tp_basicsize`, :attr:`tp_itemsize` and :attr:`tp_dictoffset` are + where :c:member:`~PyTypeObject.tp_basicsize`, :c:member:`~PyTypeObject.tp_itemsize` and :c:member:`~PyTypeObject.tp_dictoffset` are taken from the type object, and :attr:`ob_size` is taken from the instance. The absolute value is taken because ints use the sign of :attr:`ob_size` to store the sign of the number. (There's never a need to do this calculation @@ -823,15 +823,15 @@ This field is inherited by subtypes, but see the rules listed below. A subtype may override this offset; this means that the subtype instances store the dictionary at a different offset than the base type. Since the dictionary is - always found via :attr:`tp_dictoffset`, this should not be a problem. + always found via :c:member:`~PyTypeObject.tp_dictoffset`, this should not be a problem. When a type defined by a class statement has no :attr:`__slots__` declaration, and none of its base types has an instance variable dictionary, a dictionary - slot is added to the instance layout and the :attr:`tp_dictoffset` is set to + slot is added to the instance layout and the :c:member:`~PyTypeObject.tp_dictoffset` is set to that slot's offset. When a type defined by a class statement has a :attr:`__slots__` declaration, - the type inherits its :attr:`tp_dictoffset` from its base type. + the type inherits its :c:member:`~PyTypeObject.tp_dictoffset` from its base type. (Adding a slot named :attr:`__dict__` to the :attr:`__slots__` declaration does not have the expected effect, it just causes confusion. Maybe this should be @@ -855,12 +855,12 @@ arguments represent positional and keyword arguments of the call to :meth:`__init__`. - The :attr:`tp_init` function, if not *NULL*, is called when an instance is - created normally by calling its type, after the type's :attr:`tp_new` function - has returned an instance of the type. If the :attr:`tp_new` function returns an + The :c:member:`~PyTypeObject.tp_init` function, if not *NULL*, is called when an instance is + created normally by calling its type, after the type's :c:member:`~PyTypeObject.tp_new` function + has returned an instance of the type. If the :c:member:`~PyTypeObject.tp_new` function returns an instance of some other type that is not a subtype of the original type, no - :attr:`tp_init` function is called; if :attr:`tp_new` returns an instance of a - subtype of the original type, the subtype's :attr:`tp_init` is called. + :c:member:`~PyTypeObject.tp_init` function is called; if :c:member:`~PyTypeObject.tp_new` returns an instance of a + subtype of the original type, the subtype's :c:member:`~PyTypeObject.tp_init` is called.
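A compact sketch of how the ``tp_new``/``tp_init`` pair described above usually divides the work; the ``ThingObject`` type and its ``payload`` member are illustrative assumptions::

    #include <Python.h>

    /* Hypothetical object with one optional attribute. */
    typedef struct {
        PyObject_HEAD
        PyObject *payload;
    } ThingObject;

    /* tp_new: allocate the instance and put it into a safe default state. */
    static PyObject *
    Thing_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
    {
        ThingObject *self = (ThingObject *)type->tp_alloc(type, 0);
        if (self != NULL)
            self->payload = NULL;      /* tp_alloc already zeroes the memory */
        return (PyObject *)self;
    }

    /* tp_init: repeatable initialization from the constructor arguments. */
    static int
    Thing_init(ThingObject *self, PyObject *args, PyObject *kwds)
    {
        PyObject *payload = NULL;
        if (!PyArg_ParseTuple(args, "|O", &payload))
            return -1;
        if (payload != NULL) {
            PyObject *tmp = self->payload;
            Py_INCREF(payload);
            self->payload = payload;
            Py_XDECREF(tmp);
        }
        return 0;
    }

``Thing_new`` goes in the ``tp_new`` slot and ``Thing_init`` (cast to ``initproc``) in ``tp_init``; as noted above, ``tp_init`` only runs when ``tp_new`` actually returned an instance of the type or of one of its subtypes.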
This field is inherited by subtypes. @@ -877,14 +877,14 @@ initialization. It should return a pointer to a block of memory of adequate length for the instance, suitably aligned, and initialized to zeros, but with :attr:`ob_refcnt` set to ``1`` and :attr:`ob_type` set to the type argument. If - the type's :attr:`tp_itemsize` is non-zero, the object's :attr:`ob_size` field + the type's :c:member:`~PyTypeObject.tp_itemsize` is non-zero, the object's :attr:`ob_size` field should be initialized to *nitems* and the length of the allocated memory block should be ``tp_basicsize + nitems*tp_itemsize``, rounded up to a multiple of ``sizeof(void*)``; otherwise, *nitems* is not used and the length of the block - should be :attr:`tp_basicsize`. + should be :c:member:`~PyTypeObject.tp_basicsize`. Do not use this function to do any other instance initialization, not even to - allocate additional memory; that should be done by :attr:`tp_new`. + allocate additional memory; that should be done by :c:member:`~PyTypeObject.tp_new`. This field is inherited by static subtypes, but not by dynamic subtypes (subtypes created by a class statement); in the latter, this field is always set @@ -906,20 +906,20 @@ The subtype argument is the type of the object being created; the *args* and *kwds* arguments represent positional and keyword arguments of the call to the - type. Note that subtype doesn't have to equal the type whose :attr:`tp_new` + type. Note that subtype doesn't have to equal the type whose :c:member:`~PyTypeObject.tp_new` function is called; it may be a subtype of that type (but not an unrelated type). - The :attr:`tp_new` function should call ``subtype->tp_alloc(subtype, nitems)`` + The :c:member:`~PyTypeObject.tp_new` function should call ``subtype->tp_alloc(subtype, nitems)`` to allocate space for the object, and then do only as much further initialization as is absolutely necessary. Initialization that can safely be - ignored or repeated should be placed in the :attr:`tp_init` handler. A good + ignored or repeated should be placed in the :c:member:`~PyTypeObject.tp_init` handler. A good rule of thumb is that for immutable types, all initialization should take place - in :attr:`tp_new`, while for mutable types, most initialization should be - deferred to :attr:`tp_init`. + in :c:member:`~PyTypeObject.tp_new`, while for mutable types, most initialization should be + deferred to :c:member:`~PyTypeObject.tp_init`. This field is inherited by subtypes, except it is not inherited by static types - whose :attr:`tp_base` is *NULL* or ``&PyBaseObject_Type``. + whose :c:member:`~PyTypeObject.tp_base` is *NULL* or ``&PyBaseObject_Type``. .. c:member:: destructor PyTypeObject.tp_free @@ -943,7 +943,7 @@ The garbage collector needs to know whether a particular object is collectible or not. Normally, it is sufficient to look at the object's type's - :attr:`tp_flags` field, and check the :const:`Py_TPFLAGS_HAVE_GC` flag bit. But + :c:member:`~PyTypeObject.tp_flags` field, and check the :const:`Py_TPFLAGS_HAVE_GC` flag bit. But some types have a mixture of statically and dynamically allocated instances, and the statically allocated instances are not collectible. Such types should define this function; it should return ``1`` for a collectible instance, and @@ -983,14 +983,14 @@ void tp_finalize(PyObject *) - If :attr:`tp_finalize` is set, the interpreter calls it once when + If :c:member:`~PyTypeObject.tp_finalize` is set, the interpreter calls it once when finalizing an instance. 
It is called either from the garbage collector (if the instance is part of an isolated reference cycle) or just before the object is deallocated. Either way, it is guaranteed to be called before attempting to break reference cycles, ensuring that it finds the object in a sane state. - :attr:`tp_finalize` should not mutate the current exception status; + :c:member:`~PyTypeObject.tp_finalize` should not mutate the current exception status; therefore, a recommended way to write a non-trivial finalizer is:: static void @@ -1055,7 +1055,7 @@ .. c:member:: PyTypeObject* PyTypeObject.tp_next - Pointer to the next type object with a non-zero :attr:`tp_allocs` field. + Pointer to the next type object with a non-zero :c:member:`~PyTypeObject.tp_allocs` field. Also, note that, in a garbage collected Python, tp_dealloc may be called from any Python thread, not just the thread which created the object (if the object @@ -1194,13 +1194,13 @@ This function is used by :c:func:`PySequence_Concat` and has the same signature. It is also used by the ``+`` operator, after trying the numeric - addition via the :attr:`tp_as_number.nb_add` slot. + addition via the :c:member:`~PyTypeObject.tp_as_number.nb_add` slot. .. c:member:: ssizeargfunc PySequenceMethods.sq_repeat This function is used by :c:func:`PySequence_Repeat` and has the same signature. It is also used by the ``*`` operator, after trying numeric - multiplication via the :attr:`tp_as_number.nb_mul` slot. + multiplication via the :c:member:`~PyTypeObject.tp_as_number.nb_mul` slot. .. c:member:: ssizeargfunc PySequenceMethods.sq_item diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -135,11 +135,11 @@ .. note:: If you want your type to be subclassable from Python, and your type has the same - :attr:`tp_basicsize` as its base type, you may have problems with multiple + :c:member:`~PyTypeObject.tp_basicsize` as its base type, you may have problems with multiple inheritance. A Python subclass of your type will have to list your type first in its :attr:`__bases__`, or else it will not be able to call your type's :meth:`__new__` method without getting an error. You can avoid this problem by - ensuring that your type has a larger value for :attr:`tp_basicsize` than its + ensuring that your type has a larger value for :c:member:`~PyTypeObject.tp_basicsize` than its base type does. Most of the time, this will be true anyway, because either your base type will be :class:`object`, or else you will be adding data members to your base type, and therefore increasing its size. @@ -160,7 +160,7 @@ members defined until at least Python 3.3. If you need further members, you will need to OR the corresponding flags. -We provide a doc string for the type in :attr:`tp_doc`. :: +We provide a doc string for the type in :c:member:`~PyTypeObject.tp_doc`. :: "Noddy objects", /* tp_doc */ @@ -169,12 +169,12 @@ the module. We'll expand this example later to have more interesting behavior. For now, all we want to be able to do is to create new :class:`Noddy` objects. -To enable object creation, we have to provide a :attr:`tp_new` implementation. +To enable object creation, we have to provide a :c:member:`~PyTypeObject.tp_new` implementation. In this case, we can just use the default implementation provided by the API function :c:func:`PyType_GenericNew`. 
We'd like to just assign this to the -:attr:`tp_new` slot, but we can't, for portability sake, On some platforms or +:c:member:`~PyTypeObject.tp_new` slot, but we can't, for portability sake, On some platforms or compilers, we can't statically initialize a structure member with a function -defined in another C module, so, instead, we'll assign the :attr:`tp_new` slot +defined in another C module, so, instead, we'll assign the :c:member:`~PyTypeObject.tp_new` slot in the module initialization function just before calling :c:func:`PyType_Ready`:: @@ -269,13 +269,13 @@ Py_TYPE(self)->tp_free((PyObject*)self); } -which is assigned to the :attr:`tp_dealloc` member:: +which is assigned to the :c:member:`~PyTypeObject.tp_dealloc` member:: (destructor)Noddy_dealloc, /*tp_dealloc*/ This method decrements the reference counts of the two Python attributes. We use :c:func:`Py_XDECREF` here because the :attr:`first` and :attr:`last` members -could be *NULL*. It then calls the :attr:`tp_free` member of the object's type +could be *NULL*. It then calls the :c:member:`~PyTypeObject.tp_free` member of the object's type to free the object's memory. Note that the object's type might not be :class:`NoddyType`, because the object may be an instance of a subclass. @@ -307,7 +307,7 @@ return (PyObject *)self; } -and install it in the :attr:`tp_new` member:: +and install it in the :c:member:`~PyTypeObject.tp_new` member:: Noddy_new, /* tp_new */ @@ -327,17 +327,17 @@ created. New methods always accept positional and keyword arguments, but they often ignore the arguments, leaving the argument handling to initializer methods. Note that if the type supports subclassing, the type passed may not be -the type being defined. The new method calls the :attr:`tp_alloc` slot to -allocate memory. We don't fill the :attr:`tp_alloc` slot ourselves. Rather +the type being defined. The new method calls the :c:member:`~PyTypeObject.tp_alloc` slot to +allocate memory. We don't fill the :c:member:`~PyTypeObject.tp_alloc` slot ourselves. Rather :c:func:`PyType_Ready` fills it for us by inheriting it from our base class, which is :class:`object` by default. Most types use the default allocation. .. note:: - If you are creating a co-operative :attr:`tp_new` (one that calls a base type's - :attr:`tp_new` or :meth:`__new__`), you must *not* try to determine what method + If you are creating a co-operative :c:member:`~PyTypeObject.tp_new` (one that calls a base type's + :c:member:`~PyTypeObject.tp_new` or :meth:`__new__`), you must *not* try to determine what method to call using method resolution order at runtime. Always statically determine - what type you are going to call, and call its :attr:`tp_new` directly, or via + what type you are going to call, and call its :c:member:`~PyTypeObject.tp_new` directly, or via ``type->tp_base->tp_new``. If you do not do this, Python subclasses of your type that also inherit from other Python-defined classes may not work correctly. (Specifically, you may not be able to create instances of such subclasses @@ -374,11 +374,11 @@ return 0; } -by filling the :attr:`tp_init` slot. :: +by filling the :c:member:`~PyTypeObject.tp_init` slot. :: (initproc)Noddy_init, /* tp_init */ -The :attr:`tp_init` slot is exposed in Python as the :meth:`__init__` method. It +The :c:member:`~PyTypeObject.tp_init` slot is exposed in Python as the :meth:`__init__` method. It is used to initialize an object after it's created. Unlike the new method, we can't guarantee that the initializer is called. 
The initializer isn't called when unpickling objects and it can be overridden. Our initializer accepts @@ -408,7 +408,7 @@ * when we know that deallocation of the object [#]_ will not cause any calls back into our type's code -* when decrementing a reference count in a :attr:`tp_dealloc` handler when +* when decrementing a reference count in a :c:member:`~PyTypeObject.tp_dealloc` handler when garbage-collections is not supported [#]_ We want to expose our instance variables as attributes. There are a @@ -424,7 +424,7 @@ {NULL} /* Sentinel */ }; -and put the definitions in the :attr:`tp_members` slot:: +and put the definitions in the :c:member:`~PyTypeObject.tp_members` slot:: Noddy_members, /* tp_members */ @@ -484,7 +484,7 @@ {NULL} /* Sentinel */ }; -and assign them to the :attr:`tp_methods` slot:: +and assign them to the :c:member:`~PyTypeObject.tp_methods` slot:: Noddy_methods, /* tp_methods */ @@ -579,7 +579,7 @@ {NULL} /* Sentinel */ }; -and register it in the :attr:`tp_getset` slot:: +and register it in the :c:member:`~PyTypeObject.tp_getset` slot:: Noddy_getseters, /* tp_getset */ @@ -596,7 +596,7 @@ {NULL} /* Sentinel */ }; -We also need to update the :attr:`tp_init` handler to only allow strings [#]_ to +We also need to update the :c:member:`~PyTypeObject.tp_init` handler to only allow strings [#]_ to be passed:: static int @@ -714,7 +714,7 @@ .. note:: - Note that the :attr:`tp_traverse` implementation must name its arguments exactly + Note that the :c:member:`~PyTypeObject.tp_traverse` implementation must name its arguments exactly *visit* and *arg* in order to use :c:func:`Py_VISIT`. This is to encourage uniformity across these boring implementations. @@ -751,7 +751,7 @@ reference count drops to zero, we might cause code to run that calls back into the object. In addition, because we now support garbage collection, we also have to worry about code being run that triggers garbage collection. If garbage -collection is run, our :attr:`tp_traverse` handler could get called. We can't +collection is run, our :c:member:`~PyTypeObject.tp_traverse` handler could get called. We can't take a chance of having :c:func:`Noddy_traverse` called when a member's reference count has dropped to zero and its value hasn't been set to *NULL*. @@ -771,8 +771,8 @@ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ -That's pretty much it. If we had written custom :attr:`tp_alloc` or -:attr:`tp_free` slots, we'd need to modify them for cyclic-garbage collection. +That's pretty much it. If we had written custom :c:member:`~PyTypeObject.tp_alloc` or +:c:member:`~PyTypeObject.tp_free` slots, we'd need to modify them for cyclic-garbage collection. Most extensions will use the versions automatically provided. @@ -831,8 +831,8 @@ This pattern is important when writing a type with custom :attr:`new` and :attr:`dealloc` methods. The :attr:`new` method should not actually create the -memory for the object with :attr:`tp_alloc`, that will be handled by the base -class when calling its :attr:`tp_new`. +memory for the object with :c:member:`~PyTypeObject.tp_alloc`, that will be handled by the base +class when calling its :c:member:`~PyTypeObject.tp_new`. When filling out the :c:func:`PyTypeObject` for the :class:`Shoddy` type, you see a slot for :c:func:`tp_base`. Due to cross platform compiler issues, you can't @@ -858,8 +858,8 @@ } Before calling :c:func:`PyType_Ready`, the type structure must have the -:attr:`tp_base` slot filled in. 
When we are deriving a new type, it is not -necessary to fill out the :attr:`tp_alloc` slot with :c:func:`PyType_GenericNew` +:c:member:`~PyTypeObject.tp_base` slot filled in. When we are deriving a new type, it is not +necessary to fill out the :c:member:`~PyTypeObject.tp_alloc` slot with :c:func:`PyType_GenericNew` -- the allocate function from the base type will be inherited. After that, calling :c:func:`PyType_Ready` and adding the type object to the @@ -902,7 +902,7 @@ These fields tell the runtime how much memory to allocate when new objects of this type are created. Python has some built-in support for variable length -structures (think: strings, lists) which is where the :attr:`tp_itemsize` field +structures (think: strings, lists) which is where the :c:member:`~PyTypeObject.tp_itemsize` field comes in. This will be dealt with later. :: char *tp_doc; @@ -984,16 +984,16 @@ .. note:: There are limitations to what you can safely do in a deallocator function. - First, if your type supports garbage collection (using :attr:`tp_traverse` - and/or :attr:`tp_clear`), some of the object's members can have been - cleared or finalized by the time :attr:`tp_dealloc` is called. Second, in - :attr:`tp_dealloc`, your object is in an unstable state: its reference + First, if your type supports garbage collection (using :c:member:`~PyTypeObject.tp_traverse` + and/or :c:member:`~PyTypeObject.tp_clear`), some of the object's members can have been + cleared or finalized by the time :c:member:`~PyTypeObject.tp_dealloc` is called. Second, in + :c:member:`~PyTypeObject.tp_dealloc`, your object is in an unstable state: its reference count is equal to zero. Any call to a non-trivial object or API (as in the - example above) might end up calling :attr:`tp_dealloc` again, causing a + example above) might end up calling :c:member:`~PyTypeObject.tp_dealloc` again, causing a double free and a crash. Starting with Python 3.4, it is recommended not to put any complex - finalization code in :attr:`tp_dealloc`, and instead use the new + finalization code in :c:member:`~PyTypeObject.tp_dealloc`, and instead use the new :c:member:`~PyTypeObject.tp_finalize` type method. .. seealso:: @@ -1015,7 +1015,7 @@ reprfunc tp_repr; reprfunc tp_str; -The :attr:`tp_repr` handler should return a string object containing a +The :c:member:`~PyTypeObject.tp_repr` handler should return a string object containing a representation of the instance for which it is called. Here is a simple example:: @@ -1026,15 +1026,15 @@ obj->obj_UnderlyingDatatypePtr->size); } -If no :attr:`tp_repr` handler is specified, the interpreter will supply a -representation that uses the type's :attr:`tp_name` and a uniquely-identifying +If no :c:member:`~PyTypeObject.tp_repr` handler is specified, the interpreter will supply a +representation that uses the type's :c:member:`~PyTypeObject.tp_name` and a uniquely-identifying value for the object. -The :attr:`tp_str` handler is to :func:`str` what the :attr:`tp_repr` handler +The :c:member:`~PyTypeObject.tp_str` handler is to :func:`str` what the :c:member:`~PyTypeObject.tp_repr` handler described above is to :func:`repr`; that is, it is called when Python code calls :func:`str` on an instance of your object. Its implementation is very similar -to the :attr:`tp_repr` function, but the resulting string is intended for human -consumption. If :attr:`tp_str` is not specified, the :attr:`tp_repr` handler is +to the :c:member:`~PyTypeObject.tp_repr` function, but the resulting string is intended for human +consumption. 
If :c:member:`~PyTypeObject.tp_str` is not specified, the :c:member:`~PyTypeObject.tp_repr` handler is used instead. Here is a simple example:: @@ -1099,7 +1099,7 @@ type object. Each descriptor controls access to one attribute of the instance object. Each of the tables is optional; if all three are *NULL*, instances of the type will only have attributes that are inherited from their base type, and -should leave the :attr:`tp_getattro` and :attr:`tp_setattro` fields *NULL* as +should leave the :c:member:`~PyTypeObject.tp_getattro` and :c:member:`~PyTypeObject.tp_setattro` fields *NULL* as well, allowing the base type to handle attributes. The tables are declared as three fields of the type object:: @@ -1108,7 +1108,7 @@ struct PyMemberDef *tp_members; struct PyGetSetDef *tp_getset; -If :attr:`tp_methods` is not *NULL*, it must refer to an array of +If :c:member:`~PyTypeObject.tp_methods` is not *NULL*, it must refer to an array of :c:type:`PyMethodDef` structures. Each entry in the table is an instance of this structure:: @@ -1164,13 +1164,13 @@ single: WRITE_RESTRICTED single: RESTRICTED -An interesting advantage of using the :attr:`tp_members` table to build +An interesting advantage of using the :c:member:`~PyTypeObject.tp_members` table to build descriptors that are used at runtime is that any attribute defined this way can have an associated doc string simply by providing the text in the table. An application can use the introspection API to retrieve the descriptor from the class object, and get the doc string using its :attr:`__doc__` attribute. -As with the :attr:`tp_methods` table, a sentinel entry with a :attr:`name` value +As with the :c:member:`~PyTypeObject.tp_methods` table, a sentinel entry with a :attr:`name` value of *NULL* is required. .. XXX Descriptors need to be explained in more detail somewhere, but not here. @@ -1194,7 +1194,7 @@ called, so that if you do need to extend their functionality, you'll understand what needs to be done. -The :attr:`tp_getattr` handler is called when the object requires an attribute +The :c:member:`~PyTypeObject.tp_getattr` handler is called when the object requires an attribute look-up. It is called in the same situations where the :meth:`__getattr__` method of a class would be called. @@ -1214,11 +1214,11 @@ return NULL; } -The :attr:`tp_setattr` handler is called when the :meth:`__setattr__` or +The :c:member:`~PyTypeObject.tp_setattr` handler is called when the :meth:`__setattr__` or :meth:`__delattr__` method of a class instance would be called. When an attribute should be deleted, the third parameter will be *NULL*. Here is an example that simply raises an exception; if this were really all you wanted, the -:attr:`tp_setattr` handler should be set to *NULL*. :: +:c:member:`~PyTypeObject.tp_setattr` handler should be set to *NULL*. :: static int newdatatype_setattr(newdatatypeobject *obj, char *name, PyObject *v) @@ -1234,7 +1234,7 @@ richcmpfunc tp_richcompare; -The :attr:`tp_richcompare` handler is called when comparisons are needed. It is +The :c:member:`~PyTypeObject.tp_richcompare` handler is called when comparisons are needed. It is analogous to the :ref:`rich comparison methods `, like :meth:`__lt__`, and also called by :c:func:`PyObject_RichCompare` and :c:func:`PyObject_RichCompareBool`. @@ -1325,7 +1325,7 @@ This function is called when an instance of your data type is "called", for example, if ``obj1`` is an instance of your data type and the Python script -contains ``obj1('hello')``, the :attr:`tp_call` handler is invoked. 
+contains ``obj1('hello')``, the :c:member:`~PyTypeObject.tp_call` handler is invoked. This function takes three arguments: @@ -1412,7 +1412,7 @@ For an object to be weakly referencable, the extension must include a :c:type:`PyObject\*` field in the instance structure for the use of the weak reference mechanism; it must be initialized to *NULL* by the object's -constructor. It must also set the :attr:`tp_weaklistoffset` field of the +constructor. It must also set the :c:member:`~PyTypeObject.tp_weaklistoffset` field of the corresponding type object to the offset of the field. For example, the instance type is defined with the following structure:: @@ -1498,7 +1498,7 @@ .. [#] This is true when we know that the object is a basic type, like a string or a float. -.. [#] We relied on this in the :attr:`tp_dealloc` handler in this example, because our +.. [#] We relied on this in the :c:member:`~PyTypeObject.tp_dealloc` handler in this example, because our type doesn't support garbage collection. Even if a type supports garbage collection, there are calls that can be made to "untrack" the object from garbage collection, however, these calls are advanced and not covered here. diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -139,8 +139,8 @@ Return a list of objects directly referred to by any of the arguments. The referents returned are those objects visited by the arguments' C-level - :attr:`tp_traverse` methods (if any), and may not be all objects actually - directly reachable. :attr:`tp_traverse` methods are supported only by objects + :c:member:`~PyTypeObject.tp_traverse` methods (if any), and may not be all objects actually + directly reachable. :c:member:`~PyTypeObject.tp_traverse` methods are supported only by objects that support garbage collection, and are only required to visit objects that may be involved in a cycle. So, for example, if an integer is directly reachable from an argument, that integer object may or may not appear in the result list. diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -751,7 +751,7 @@ iterators for those iteration types. (An example of an object supporting multiple forms of iteration would be a tree structure which supports both breadth-first and depth-first traversal.) This method corresponds to the - :attr:`tp_iter` slot of the type structure for Python objects in the Python/C + :c:member:`~PyTypeObject.tp_iter` slot of the type structure for Python objects in the Python/C API. The iterator objects themselves are required to support the following two @@ -762,7 +762,7 @@ Return the iterator object itself. This is required to allow both containers and iterators to be used with the :keyword:`for` and :keyword:`in` statements. - This method corresponds to the :attr:`tp_iter` slot of the type structure for + This method corresponds to the :c:member:`~PyTypeObject.tp_iter` slot of the type structure for Python objects in the Python/C API. @@ -770,7 +770,7 @@ Return the next item from the container. If there are no further items, raise the :exc:`StopIteration` exception. This method corresponds to the - :attr:`tp_iternext` slot of the type structure for Python objects in the + :c:member:`~PyTypeObject.tp_iternext` slot of the type structure for Python objects in the Python/C API. 
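At the C level, the two iterator slots just mentioned might be filled by something like the following sketch. The ``Countdown`` type and its ``remaining`` field are invented for illustration and are not part of this patch::

    #include "Python.h"

    typedef struct {
        PyObject_HEAD
        Py_ssize_t remaining;      /* values still to be produced */
    } CountdownObject;

    /* tp_iter: an iterator simply returns itself */
    static PyObject *
    Countdown_iter(PyObject *self)
    {
        Py_INCREF(self);
        return self;
    }

    /* tp_iternext: return the next value, or NULL with no exception
       set to signal that the iterator is exhausted */
    static PyObject *
    Countdown_iternext(PyObject *self)
    {
        CountdownObject *c = (CountdownObject *)self;
        if (c->remaining <= 0)
            return NULL;
        return PyLong_FromSsize_t(c->remaining--);
    }

The two functions would then be placed in the ``tp_iter`` and ``tp_iternext`` slots of the hypothetical type object.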
Python defines several iterator objects to support iteration over general and diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -450,9 +450,9 @@ Python classes can define an :meth:`__iter__` method, which should create and return a new iterator for the object; if the object is its own iterator, this method can just return ``self``. In particular, iterators will usually be their -own iterators. Extension types implemented in C can implement a :attr:`tp_iter` +own iterators. Extension types implemented in C can implement a :c:member:`~PyTypeObject.tp_iter` function in order to return an iterator, and extension types that want to behave -as iterators can define a :attr:`tp_iternext` function. +as iterators can define a :c:member:`~PyTypeObject.tp_iternext` function. So, after all this, what do iterators actually do? They have one required method, :meth:`next`, which takes no arguments and returns the next value. When @@ -478,7 +478,7 @@ In 2.2, Python's :keyword:`for` statement no longer expects a sequence; it expects something for which :func:`iter` will return an iterator. For backward compatibility and convenience, an iterator is automatically constructed for -sequences that don't implement :meth:`__iter__` or a :attr:`tp_iter` slot, so +sequences that don't implement :meth:`__iter__` or a :c:member:`~PyTypeObject.tp_iter` slot, so ``for i in [1,2,3]`` will still work. Wherever the Python interpreter loops over a sequence, it's been changed to use the iterator protocol. This means you can do things like this:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 21:17:34 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 21:17:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NTg5?= =?utf-8?q?=3A_fix_hyperlinking_of_type_slots_=28tp=5F*=29?= Message-ID: <3c5h8t04Vhz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/bb546f6d8ab4 changeset: 84957:bb546f6d8ab4 branch: 2.7 parent: 84928:addd9210816b user: Antoine Pitrou date: Thu Aug 01 21:17:24 2013 +0200 summary: Issue #18589: fix hyperlinking of type slots (tp_*) files: Doc/c-api/allocation.rst | 6 +- Doc/c-api/gcsupport.rst | 28 +- Doc/c-api/structures.rst | 4 +- Doc/c-api/typeobj.rst | 280 ++++++++++++------------ Doc/extending/newtypes.rst | 98 ++++---- Doc/library/gc.rst | 4 +- Doc/library/stdtypes.rst | 6 +- Doc/whatsnew/2.2.rst | 6 +- 8 files changed, 216 insertions(+), 216 deletions(-) diff --git a/Doc/c-api/allocation.rst b/Doc/c-api/allocation.rst --- a/Doc/c-api/allocation.rst +++ b/Doc/c-api/allocation.rst @@ -43,7 +43,7 @@ Allocate a new Python object using the C structure type *TYPE* and the Python type object *type*. Fields not defined by the Python object header are not initialized; the object's reference count will be one. The size of - the memory allocation is determined from the :attr:`tp_basicsize` field of + the memory allocation is determined from the :c:member:`~PyTypeObject.tp_basicsize` field of the type object. @@ -52,7 +52,7 @@ Allocate a new Python object using the C structure type *TYPE* and the Python type object *type*. Fields not defined by the Python object header are not initialized. The allocated memory allows for the *TYPE* structure - plus *size* fields of the size given by the :attr:`tp_itemsize` field of + plus *size* fields of the size given by the :c:member:`~PyTypeObject.tp_itemsize` field of *type*. 
This is useful for implementing objects like tuples, which are able to determine their size at construction time. Embedding the array of fields into the same allocation decreases the number of allocations, @@ -67,7 +67,7 @@ Releases memory allocated to an object using :c:func:`PyObject_New` or :c:func:`PyObject_NewVar`. This is normally called from the - :attr:`tp_dealloc` handler specified in the object's type. The fields of + :c:member:`~PyTypeObject.tp_dealloc` handler specified in the object's type. The fields of the object should not be accessed after this call as the memory is no longer a valid Python object. diff --git a/Doc/c-api/gcsupport.rst b/Doc/c-api/gcsupport.rst --- a/Doc/c-api/gcsupport.rst +++ b/Doc/c-api/gcsupport.rst @@ -15,10 +15,10 @@ .. An example showing the use of these interfaces can be found in "Supporting the .. Cycle Collector (XXX not found: ../ext/example-cycle-support.html)". -To create a container type, the :attr:`tp_flags` field of the type object must +To create a container type, the :c:member:`~PyTypeObject.tp_flags` field of the type object must include the :const:`Py_TPFLAGS_HAVE_GC` and provide an implementation of the -:attr:`tp_traverse` handler. If instances of the type are mutable, a -:attr:`tp_clear` implementation must also be provided. +:c:member:`~PyTypeObject.tp_traverse` handler. If instances of the type are mutable, a +:c:member:`~PyTypeObject.tp_clear` implementation must also be provided. .. data:: Py_TPFLAGS_HAVE_GC @@ -68,7 +68,7 @@ Adds the object *op* to the set of container objects tracked by the collector. The collector can run at unexpected times so objects must be valid while being tracked. This should be called once all the fields - followed by the :attr:`tp_traverse` handler become valid, usually near the + followed by the :c:member:`~PyTypeObject.tp_traverse` handler become valid, usually near the end of the constructor. @@ -97,8 +97,8 @@ Remove the object *op* from the set of container objects tracked by the collector. Note that :c:func:`PyObject_GC_Track` can be called again on this object to add it back to the set of tracked objects. The deallocator - (:attr:`tp_dealloc` handler) should call this for the object before any of - the fields used by the :attr:`tp_traverse` handler become invalid. + (:c:member:`~PyTypeObject.tp_dealloc` handler) should call this for the object before any of + the fields used by the :c:member:`~PyTypeObject.tp_traverse` handler become invalid. .. c:function:: void _PyObject_GC_UNTRACK(PyObject *op) @@ -106,19 +106,19 @@ A macro version of :c:func:`PyObject_GC_UnTrack`. It should not be used for extension modules. -The :attr:`tp_traverse` handler accepts a function parameter of this type: +The :c:member:`~PyTypeObject.tp_traverse` handler accepts a function parameter of this type: .. c:type:: int (*visitproc)(PyObject *object, void *arg) - Type of the visitor function passed to the :attr:`tp_traverse` handler. + Type of the visitor function passed to the :c:member:`~PyTypeObject.tp_traverse` handler. The function should be called with an object to traverse as *object* and - the third parameter to the :attr:`tp_traverse` handler as *arg*. The + the third parameter to the :c:member:`~PyTypeObject.tp_traverse` handler as *arg*. The Python core uses several visitor functions to implement cyclic garbage detection; it's not expected that users will need to write their own visitor functions. 
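The ordering rules above (track an object only once the fields seen by ``tp_traverse`` are valid, and untrack it before they are torn down) can be put into a short, purely illustrative sketch; the ``Pair`` type, its fields, and ``Pair_Type`` are assumptions rather than code from this patch::

    #include "Python.h"

    typedef struct {
        PyObject_HEAD
        PyObject *first;     /* reported to the collector by tp_traverse */
        PyObject *second;
    } PairObject;

    extern PyTypeObject Pair_Type;   /* assumed to have Py_TPFLAGS_HAVE_GC set */

    static PyObject *
    Pair_create(PyObject *first, PyObject *second)
    {
        PairObject *op = PyObject_GC_New(PairObject, &Pair_Type);
        if (op == NULL)
            return NULL;
        Py_INCREF(first);
        op->first = first;
        Py_INCREF(second);
        op->second = second;
        /* the fields visited by tp_traverse are now valid: start tracking */
        PyObject_GC_Track(op);
        return (PyObject *)op;
    }

    static void
    Pair_dealloc(PairObject *op)
    {
        /* stop tracking before the traversed fields become invalid */
        PyObject_GC_UnTrack(op);
        Py_XDECREF(op->first);
        Py_XDECREF(op->second);
        /* a non-subtypable type may call the matching deleter directly */
        PyObject_GC_Del(op);
    }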
-The :attr:`tp_traverse` handler must have the following type: +The :c:member:`~PyTypeObject.tp_traverse` handler must have the following type: .. c:type:: int (*traverseproc)(PyObject *self, visitproc visit, void *arg) @@ -130,15 +130,15 @@ object argument. If *visit* returns a non-zero value that value should be returned immediately. -To simplify writing :attr:`tp_traverse` handlers, a :c:func:`Py_VISIT` macro is -provided. In order to use this macro, the :attr:`tp_traverse` implementation +To simplify writing :c:member:`~PyTypeObject.tp_traverse` handlers, a :c:func:`Py_VISIT` macro is +provided. In order to use this macro, the :c:member:`~PyTypeObject.tp_traverse` implementation must name its arguments exactly *visit* and *arg*: .. c:function:: void Py_VISIT(PyObject *o) Call the *visit* callback, with arguments *o* and *arg*. If *visit* returns - a non-zero value, then return it. Using this macro, :attr:`tp_traverse` + a non-zero value, then return it. Using this macro, :c:member:`~PyTypeObject.tp_traverse` handlers look like:: static int @@ -151,7 +151,7 @@ .. versionadded:: 2.4 -The :attr:`tp_clear` handler must be of the :c:type:`inquiry` type, or *NULL* +The :c:member:`~PyTypeObject.tp_clear` handler must be of the :c:type:`inquiry` type, or *NULL* if the object is immutable. diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -293,6 +293,6 @@ .. c:function:: PyObject* Py_FindMethod(PyMethodDef table[], PyObject *ob, char *name) Return a bound method object for an extension type implemented in C. This - can be useful in the implementation of a :attr:`tp_getattro` or - :attr:`tp_getattr` handler that does not use the + can be useful in the implementation of a :c:member:`~PyTypeObject.tp_getattro` or + :c:member:`~PyTypeObject.tp_getattr` handler that does not use the :c:func:`PyObject_GenericGetAttr` function. diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -35,7 +35,7 @@ The type object structure extends the :c:type:`PyVarObject` structure. The :attr:`ob_size` field is used for dynamic types (created by :func:`type_new`, usually called from a class statement). Note that :c:data:`PyType_Type` (the -metatype) initializes :attr:`tp_itemsize`, which means that its instances (i.e. +metatype) initializes :c:member:`~PyTypeObject.tp_itemsize`, which means that its instances (i.e. type objects) *must* have the :attr:`ob_size` field. @@ -108,7 +108,7 @@ should be just the type name. If the module is a submodule of a package, the full package name is part of the full module name. For example, a type named :class:`T` defined in module :mod:`M` in subpackage :mod:`Q` in package :mod:`P` - should have the :attr:`tp_name` initializer ``"P.Q.M.T"``. + should have the :c:member:`~PyTypeObject.tp_name` initializer ``"P.Q.M.T"``. For dynamically allocated type objects, this should just be the type name, and the module name explicitly stored in the type dict as the value for key @@ -119,7 +119,7 @@ attribute, and everything after the last dot is made accessible as the :attr:`__name__` attribute. - If no dot is present, the entire :attr:`tp_name` field is made accessible as the + If no dot is present, the entire :c:member:`~PyTypeObject.tp_name` field is made accessible as the :attr:`__name__` attribute, and the :attr:`__module__` attribute is undefined (unless explicitly set in the dictionary, as explained above). 
This means your type will be impossible to pickle. @@ -133,13 +133,13 @@ These fields allow calculating the size in bytes of instances of the type. There are two kinds of types: types with fixed-length instances have a zero - :attr:`tp_itemsize` field, types with variable-length instances have a non-zero - :attr:`tp_itemsize` field. For a type with fixed-length instances, all - instances have the same size, given in :attr:`tp_basicsize`. + :c:member:`~PyTypeObject.tp_itemsize` field, types with variable-length instances have a non-zero + :c:member:`~PyTypeObject.tp_itemsize` field. For a type with fixed-length instances, all + instances have the same size, given in :c:member:`~PyTypeObject.tp_basicsize`. For a type with variable-length instances, the instances must have an - :attr:`ob_size` field, and the instance size is :attr:`tp_basicsize` plus N - times :attr:`tp_itemsize`, where N is the "length" of the object. The value of + :attr:`ob_size` field, and the instance size is :c:member:`~PyTypeObject.tp_basicsize` plus N + times :c:member:`~PyTypeObject.tp_itemsize`, where N is the "length" of the object. The value of N is typically stored in the instance's :attr:`ob_size` field. There are exceptions: for example, long ints use a negative :attr:`ob_size` to indicate a negative number, and N is ``abs(ob_size)`` there. Also, the presence of an @@ -152,21 +152,21 @@ :c:macro:`PyObject_HEAD` or :c:macro:`PyObject_VAR_HEAD` (whichever is used to declare the instance struct) and this in turn includes the :attr:`_ob_prev` and :attr:`_ob_next` fields if they are present. This means that the only correct - way to get an initializer for the :attr:`tp_basicsize` is to use the + way to get an initializer for the :c:member:`~PyTypeObject.tp_basicsize` is to use the ``sizeof`` operator on the struct used to declare the instance layout. The basic size does not include the GC header size (this is new in Python 2.2; - in 2.1 and 2.0, the GC header size was included in :attr:`tp_basicsize`). + in 2.1 and 2.0, the GC header size was included in :c:member:`~PyTypeObject.tp_basicsize`). These fields are inherited separately by subtypes. If the base type has a - non-zero :attr:`tp_itemsize`, it is generally not safe to set - :attr:`tp_itemsize` to a different non-zero value in a subtype (though this + non-zero :c:member:`~PyTypeObject.tp_itemsize`, it is generally not safe to set + :c:member:`~PyTypeObject.tp_itemsize` to a different non-zero value in a subtype (though this depends on the implementation of the base type). A note about alignment: if the variable items require a particular alignment, - this should be taken care of by the value of :attr:`tp_basicsize`. Example: - suppose a type implements an array of ``double``. :attr:`tp_itemsize` is + this should be taken care of by the value of :c:member:`~PyTypeObject.tp_basicsize`. Example: + suppose a type implements an array of ``double``. :c:member:`~PyTypeObject.tp_itemsize` is ``sizeof(double)``. It is the programmer's responsibility that - :attr:`tp_basicsize` is a multiple of ``sizeof(double)`` (assuming this is the + :c:member:`~PyTypeObject.tp_basicsize` is a multiple of ``sizeof(double)`` (assuming this is the alignment requirement for ``double``). 
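For the fixed-length case, the advice to take ``tp_basicsize`` from ``sizeof`` on the instance struct looks like the following sketch; the ``Thing`` names are invented for illustration::

    #include "Python.h"

    typedef struct {
        PyObject_HEAD
        double value;              /* needs sizeof(double) alignment */
        PyObject *attachment;
    } ThingObject;

    static PyTypeObject Thing_Type = {
        PyVarObject_HEAD_INIT(NULL, 0)
        "mymodule.Thing",          /* tp_name */
        sizeof(ThingObject),       /* tp_basicsize: sizeof() the struct */
        0,                         /* tp_itemsize: fixed-length instances */
        /* remaining slots left at zero for this sketch */
    };

Because ``sizeof`` already accounts for the object header macros and any padding, the declared basic size stays correct even when the header layout differs between builds.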
@@ -182,10 +182,10 @@ destructor function should free all references which the instance owns, free all memory buffers owned by the instance (using the freeing function corresponding to the allocation function used to allocate the buffer), and finally (as its - last action) call the type's :attr:`tp_free` function. If the type is not + last action) call the type's :c:member:`~PyTypeObject.tp_free` function. If the type is not subtypable (doesn't have the :const:`Py_TPFLAGS_BASETYPE` flag bit set), it is permissible to call the object deallocator directly instead of via - :attr:`tp_free`. The object deallocator should be the one used to allocate the + :c:member:`~PyTypeObject.tp_free`. The object deallocator should be the one used to allocate the instance; this is normally :c:func:`PyObject_Del` if the instance was allocated using :c:func:`PyObject_New` or :c:func:`PyObject_VarNew`, or :c:func:`PyObject_GC_Del` if the instance was allocated using @@ -200,25 +200,25 @@ The print function is only called when the instance is printed to a *real* file; when it is printed to a pseudo-file (like a :class:`StringIO` instance), the - instance's :attr:`tp_repr` or :attr:`tp_str` function is called to convert it to - a string. These are also called when the type's :attr:`tp_print` field is - *NULL*. A type should never implement :attr:`tp_print` in a way that produces - different output than :attr:`tp_repr` or :attr:`tp_str` would. + instance's :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` function is called to convert it to + a string. These are also called when the type's :c:member:`~PyTypeObject.tp_print` field is + *NULL*. A type should never implement :c:member:`~PyTypeObject.tp_print` in a way that produces + different output than :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` would. The print function is called with the same signature as :c:func:`PyObject_Print`: ``int tp_print(PyObject *self, FILE *file, int flags)``. The *self* argument is the instance to be printed. The *file* argument is the stdio file to which it is to be printed. The *flags* argument is composed of flag bits. The only flag bit currently defined is :const:`Py_PRINT_RAW`. When the :const:`Py_PRINT_RAW` - flag bit is set, the instance should be printed the same way as :attr:`tp_str` + flag bit is set, the instance should be printed the same way as :c:member:`~PyTypeObject.tp_str` would format it; when the :const:`Py_PRINT_RAW` flag bit is clear, the instance - should be printed the same was as :attr:`tp_repr` would format it. It should + should be printed the same was as :c:member:`~PyTypeObject.tp_repr` would format it. It should return ``-1`` and set an exception condition when an error occurred during the comparison. - It is possible that the :attr:`tp_print` field will be deprecated. In any case, - it is recommended not to define :attr:`tp_print`, but instead to rely on - :attr:`tp_repr` and :attr:`tp_str` for printing. + It is possible that the :c:member:`~PyTypeObject.tp_print` field will be deprecated. In any case, + it is recommended not to define :c:member:`~PyTypeObject.tp_print`, but instead to rely on + :c:member:`~PyTypeObject.tp_repr` and :c:member:`~PyTypeObject.tp_str` for printing. This field is inherited by subtypes. @@ -228,13 +228,13 @@ An optional pointer to the get-attribute-string function. This field is deprecated. 
When it is defined, it should point to a function - that acts the same as the :attr:`tp_getattro` function, but taking a C string + that acts the same as the :c:member:`~PyTypeObject.tp_getattro` function, but taking a C string instead of a Python string object to give the attribute name. The signature is the same as for :c:func:`PyObject_GetAttrString`. - This field is inherited by subtypes together with :attr:`tp_getattro`: a subtype - inherits both :attr:`tp_getattr` and :attr:`tp_getattro` from its base type when - the subtype's :attr:`tp_getattr` and :attr:`tp_getattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_getattro`: a subtype + inherits both :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` are both *NULL*. .. c:member:: setattrfunc PyTypeObject.tp_setattr @@ -242,13 +242,13 @@ An optional pointer to the set-attribute-string function. This field is deprecated. When it is defined, it should point to a function - that acts the same as the :attr:`tp_setattro` function, but taking a C string + that acts the same as the :c:member:`~PyTypeObject.tp_setattro` function, but taking a C string instead of a Python string object to give the attribute name. The signature is the same as for :c:func:`PyObject_SetAttrString`. - This field is inherited by subtypes together with :attr:`tp_setattro`: a subtype - inherits both :attr:`tp_setattr` and :attr:`tp_setattro` from its base type when - the subtype's :attr:`tp_setattr` and :attr:`tp_setattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_setattro`: a subtype + inherits both :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` are both *NULL*. .. c:member:: cmpfunc PyTypeObject.tp_compare @@ -260,10 +260,10 @@ *other*, and ``-1`` if *self* less than *other*. It should return ``-1`` and set an exception condition when an error occurred during the comparison. - This field is inherited by subtypes together with :attr:`tp_richcompare` and - :attr:`tp_hash`: a subtypes inherits all three of :attr:`tp_compare`, - :attr:`tp_richcompare`, and :attr:`tp_hash` when the subtype's - :attr:`tp_compare`, :attr:`tp_richcompare`, and :attr:`tp_hash` are all *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_richcompare` and + :c:member:`~PyTypeObject.tp_hash`: a subtypes inherits all three of :c:member:`~PyTypeObject.tp_compare`, + :c:member:`~PyTypeObject.tp_richcompare`, and :c:member:`~PyTypeObject.tp_hash` when the subtype's + :c:member:`~PyTypeObject.tp_compare`, :c:member:`~PyTypeObject.tp_richcompare`, and :c:member:`~PyTypeObject.tp_hash` are all *NULL*. .. c:member:: reprfunc PyTypeObject.tp_repr @@ -292,7 +292,7 @@ objects which implement the number protocol. These fields are documented in :ref:`number-structs`. - The :attr:`tp_as_number` field is not inherited, but the contained fields are + The :c:member:`~PyTypeObject.tp_as_number` field is not inherited, but the contained fields are inherited individually. @@ -302,7 +302,7 @@ objects which implement the sequence protocol. These fields are documented in :ref:`sequence-structs`. 
- The :attr:`tp_as_sequence` field is not inherited, but the contained fields + The :c:member:`~PyTypeObject.tp_as_sequence` field is not inherited, but the contained fields are inherited individually. @@ -312,7 +312,7 @@ objects which implement the mapping protocol. These fields are documented in :ref:`mapping-structs`. - The :attr:`tp_as_mapping` field is not inherited, but the contained fields + The :c:member:`~PyTypeObject.tp_as_mapping` field is not inherited, but the contained fields are inherited individually. @@ -336,14 +336,14 @@ the Python level will result in the ``tp_hash`` slot being set to :c:func:`PyObject_HashNotImplemented`. - When this field is not set, two possibilities exist: if the :attr:`tp_compare` - and :attr:`tp_richcompare` fields are both *NULL*, a default hash value based on + When this field is not set, two possibilities exist: if the :c:member:`~PyTypeObject.tp_compare` + and :c:member:`~PyTypeObject.tp_richcompare` fields are both *NULL*, a default hash value based on the object's address is returned; otherwise, a :exc:`TypeError` is raised. - This field is inherited by subtypes together with :attr:`tp_richcompare` and - :attr:`tp_compare`: a subtypes inherits all three of :attr:`tp_compare`, - :attr:`tp_richcompare`, and :attr:`tp_hash`, when the subtype's - :attr:`tp_compare`, :attr:`tp_richcompare` and :attr:`tp_hash` are all *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_richcompare` and + :c:member:`~PyTypeObject.tp_compare`: a subtypes inherits all three of :c:member:`~PyTypeObject.tp_compare`, + :c:member:`~PyTypeObject.tp_richcompare`, and :c:member:`~PyTypeObject.tp_hash`, when the subtype's + :c:member:`~PyTypeObject.tp_compare`, :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` are all *NULL*. .. c:member:: ternaryfunc PyTypeObject.tp_call @@ -381,9 +381,9 @@ convenient to set this field to :c:func:`PyObject_GenericGetAttr`, which implements the normal way of looking for object attributes. - This field is inherited by subtypes together with :attr:`tp_getattr`: a subtype - inherits both :attr:`tp_getattr` and :attr:`tp_getattro` from its base type when - the subtype's :attr:`tp_getattr` and :attr:`tp_getattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_getattr`: a subtype + inherits both :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` are both *NULL*. .. c:member:: setattrofunc PyTypeObject.tp_setattro @@ -394,9 +394,9 @@ convenient to set this field to :c:func:`PyObject_GenericSetAttr`, which implements the normal way of setting object attributes. - This field is inherited by subtypes together with :attr:`tp_setattr`: a subtype - inherits both :attr:`tp_setattr` and :attr:`tp_setattro` from its base type when - the subtype's :attr:`tp_setattr` and :attr:`tp_setattro` are both *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_setattr`: a subtype + inherits both :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` from its base type when + the subtype's :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` are both *NULL*. .. c:member:: PyBufferProcs* PyTypeObject.tp_as_buffer @@ -405,7 +405,7 @@ which implement the buffer interface. These fields are documented in :ref:`buffer-structs`. 
- The :attr:`tp_as_buffer` field is not inherited, but the contained fields are + The :c:member:`~PyTypeObject.tp_as_buffer` field is not inherited, but the contained fields are inherited individually. @@ -414,8 +414,8 @@ This field is a bit mask of various flags. Some flags indicate variant semantics for certain situations; others are used to indicate that certain fields in the type object (or in the extension structures referenced via - :attr:`tp_as_number`, :attr:`tp_as_sequence`, :attr:`tp_as_mapping`, and - :attr:`tp_as_buffer`) that were historically not always present are valid; if + :c:member:`~PyTypeObject.tp_as_number`, :c:member:`~PyTypeObject.tp_as_sequence`, :c:member:`~PyTypeObject.tp_as_mapping`, and + :c:member:`~PyTypeObject.tp_as_buffer`) that were historically not always present are valid; if such a flag bit is clear, the type fields it guards must not be accessed and must be considered to have a zero or *NULL* value instead. @@ -425,14 +425,14 @@ inherited if the extension structure is inherited, i.e. the base type's value of the flag bit is copied into the subtype together with a pointer to the extension structure. The :const:`Py_TPFLAGS_HAVE_GC` flag bit is inherited together with - the :attr:`tp_traverse` and :attr:`tp_clear` fields, i.e. if the + the :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields, i.e. if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is clear in the subtype and the - :attr:`tp_traverse` and :attr:`tp_clear` fields in the subtype exist (as + :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields in the subtype exist (as indicated by the :const:`Py_TPFLAGS_HAVE_RICHCOMPARE` flag bit) and have *NULL* values. The following bit masks are currently defined; these can be ORed together using - the ``|`` operator to form the value of the :attr:`tp_flags` field. The macro + the ``|`` operator to form the value of the :c:member:`~PyTypeObject.tp_flags` field. The macro :c:func:`PyType_HasFeature` takes a type and a flags value, *tp* and *f*, and checks whether ``tp->tp_flags & f`` is non-zero. @@ -440,13 +440,13 @@ .. data:: Py_TPFLAGS_HAVE_GETCHARBUFFER If this bit is set, the :c:type:`PyBufferProcs` struct referenced by - :attr:`tp_as_buffer` has the :attr:`bf_getcharbuffer` field. + :c:member:`~PyTypeObject.tp_as_buffer` has the :attr:`bf_getcharbuffer` field. .. data:: Py_TPFLAGS_HAVE_SEQUENCE_IN If this bit is set, the :c:type:`PySequenceMethods` struct referenced by - :attr:`tp_as_sequence` has the :attr:`sq_contains` field. + :c:member:`~PyTypeObject.tp_as_sequence` has the :attr:`sq_contains` field. .. data:: Py_TPFLAGS_GC @@ -458,8 +458,8 @@ .. data:: Py_TPFLAGS_HAVE_INPLACEOPS If this bit is set, the :c:type:`PySequenceMethods` struct referenced by - :attr:`tp_as_sequence` and the :c:type:`PyNumberMethods` structure referenced by - :attr:`tp_as_number` contain the fields for in-place operators. In particular, + :c:member:`~PyTypeObject.tp_as_sequence` and the :c:type:`PyNumberMethods` structure referenced by + :c:member:`~PyTypeObject.tp_as_number` contain the fields for in-place operators. In particular, this means that the :c:type:`PyNumberMethods` structure has the fields :attr:`nb_inplace_add`, :attr:`nb_inplace_subtract`, :attr:`nb_inplace_multiply`, :attr:`nb_inplace_divide`, @@ -473,7 +473,7 @@ .. 
data:: Py_TPFLAGS_CHECKTYPES If this bit is set, the binary and ternary operations in the - :c:type:`PyNumberMethods` structure referenced by :attr:`tp_as_number` accept + :c:type:`PyNumberMethods` structure referenced by :c:member:`~PyTypeObject.tp_as_number` accept arguments of arbitrary object types, and do their own type conversions if needed. If this bit is clear, those operations require that all arguments have the current type as their type, and the caller is supposed to perform a coercion @@ -485,31 +485,31 @@ .. data:: Py_TPFLAGS_HAVE_RICHCOMPARE - If this bit is set, the type object has the :attr:`tp_richcompare` field, as - well as the :attr:`tp_traverse` and the :attr:`tp_clear` fields. + If this bit is set, the type object has the :c:member:`~PyTypeObject.tp_richcompare` field, as + well as the :c:member:`~PyTypeObject.tp_traverse` and the :c:member:`~PyTypeObject.tp_clear` fields. .. data:: Py_TPFLAGS_HAVE_WEAKREFS - If this bit is set, the :attr:`tp_weaklistoffset` field is defined. Instances - of a type are weakly referenceable if the type's :attr:`tp_weaklistoffset` field + If this bit is set, the :c:member:`~PyTypeObject.tp_weaklistoffset` field is defined. Instances + of a type are weakly referenceable if the type's :c:member:`~PyTypeObject.tp_weaklistoffset` field has a value greater than zero. .. data:: Py_TPFLAGS_HAVE_ITER - If this bit is set, the type object has the :attr:`tp_iter` and - :attr:`tp_iternext` fields. + If this bit is set, the type object has the :c:member:`~PyTypeObject.tp_iter` and + :c:member:`~PyTypeObject.tp_iternext` fields. .. data:: Py_TPFLAGS_HAVE_CLASS If this bit is set, the type object has several new fields defined starting in - Python 2.2: :attr:`tp_methods`, :attr:`tp_members`, :attr:`tp_getset`, - :attr:`tp_base`, :attr:`tp_dict`, :attr:`tp_descr_get`, :attr:`tp_descr_set`, - :attr:`tp_dictoffset`, :attr:`tp_init`, :attr:`tp_alloc`, :attr:`tp_new`, - :attr:`tp_free`, :attr:`tp_is_gc`, :attr:`tp_bases`, :attr:`tp_mro`, - :attr:`tp_cache`, :attr:`tp_subclasses`, and :attr:`tp_weaklist`. + Python 2.2: :c:member:`~PyTypeObject.tp_methods`, :c:member:`~PyTypeObject.tp_members`, :c:member:`~PyTypeObject.tp_getset`, + :c:member:`~PyTypeObject.tp_base`, :c:member:`~PyTypeObject.tp_dict`, :c:member:`~PyTypeObject.tp_descr_get`, :c:member:`~PyTypeObject.tp_descr_set`, + :c:member:`~PyTypeObject.tp_dictoffset`, :c:member:`~PyTypeObject.tp_init`, :c:member:`~PyTypeObject.tp_alloc`, :c:member:`~PyTypeObject.tp_new`, + :c:member:`~PyTypeObject.tp_free`, :c:member:`~PyTypeObject.tp_is_gc`, :c:member:`~PyTypeObject.tp_bases`, :c:member:`~PyTypeObject.tp_mro`, + :c:member:`~PyTypeObject.tp_cache`, :c:member:`~PyTypeObject.tp_subclasses`, and :c:member:`~PyTypeObject.tp_weaklist`. .. data:: Py_TPFLAGS_HEAPTYPE @@ -547,7 +547,7 @@ is set, instances must be created using :c:func:`PyObject_GC_New` and destroyed using :c:func:`PyObject_GC_Del`. More information in section :ref:`supporting-cycle-detection`. This bit also implies that the - GC-related fields :attr:`tp_traverse` and :attr:`tp_clear` are present in + GC-related fields :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` are present in the type object; but those fields also exist when :const:`Py_TPFLAGS_HAVE_GC` is clear but :const:`Py_TPFLAGS_HAVE_RICHCOMPARE` is set. @@ -582,8 +582,8 @@ about Python's garbage collection scheme can be found in section :ref:`supporting-cycle-detection`. 
- The :attr:`tp_traverse` pointer is used by the garbage collector to detect - reference cycles. A typical implementation of a :attr:`tp_traverse` function + The :c:member:`~PyTypeObject.tp_traverse` pointer is used by the garbage collector to detect + reference cycles. A typical implementation of a :c:member:`~PyTypeObject.tp_traverse` function simply calls :c:func:`Py_VISIT` on each of the instance's members that are Python objects. For example, this is function :c:func:`local_traverse` from the :mod:`thread` extension module:: @@ -609,9 +609,9 @@ :c:func:`local_traverse` to have these specific names; don't name them just anything. - This field is inherited by subtypes together with :attr:`tp_clear` and the - :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :attr:`tp_traverse`, and - :attr:`tp_clear` are all inherited from the base type if they are all zero in + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_clear` and the + :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and + :c:member:`~PyTypeObject.tp_clear` are all inherited from the base type if they are all zero in the subtype *and* the subtype has the :const:`Py_TPFLAGS_HAVE_RICHCOMPARE` flag bit set. @@ -621,17 +621,17 @@ An optional pointer to a clear function for the garbage collector. This is only used if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is set. - The :attr:`tp_clear` member function is used to break reference cycles in cyclic - garbage detected by the garbage collector. Taken together, all :attr:`tp_clear` + The :c:member:`~PyTypeObject.tp_clear` member function is used to break reference cycles in cyclic + garbage detected by the garbage collector. Taken together, all :c:member:`~PyTypeObject.tp_clear` functions in the system must combine to break all reference cycles. This is - subtle, and if in any doubt supply a :attr:`tp_clear` function. For example, - the tuple type does not implement a :attr:`tp_clear` function, because it's + subtle, and if in any doubt supply a :c:member:`~PyTypeObject.tp_clear` function. For example, + the tuple type does not implement a :c:member:`~PyTypeObject.tp_clear` function, because it's possible to prove that no reference cycle can be composed entirely of tuples. - Therefore the :attr:`tp_clear` functions of other types must be sufficient to + Therefore the :c:member:`~PyTypeObject.tp_clear` functions of other types must be sufficient to break any cycle containing a tuple. This isn't immediately obvious, and there's - rarely a good reason to avoid implementing :attr:`tp_clear`. + rarely a good reason to avoid implementing :c:member:`~PyTypeObject.tp_clear`. - Implementations of :attr:`tp_clear` should drop the instance's references to + Implementations of :c:member:`~PyTypeObject.tp_clear` should drop the instance's references to those of its members that may be Python objects, and set its pointers to those members to *NULL*, as in the following example:: @@ -656,18 +656,18 @@ so that *self* knows the contained object can no longer be used. The :c:func:`Py_CLEAR` macro performs the operations in a safe order. - Because the goal of :attr:`tp_clear` functions is to break reference cycles, + Because the goal of :c:member:`~PyTypeObject.tp_clear` functions is to break reference cycles, it's not necessary to clear contained objects like Python strings or Python integers, which can't participate in reference cycles. 
On the other hand, it may be convenient to clear all contained Python objects, and write the type's - :attr:`tp_dealloc` function to invoke :attr:`tp_clear`. + :c:member:`~PyTypeObject.tp_dealloc` function to invoke :c:member:`~PyTypeObject.tp_clear`. More information about Python's garbage collection scheme can be found in section :ref:`supporting-cycle-detection`. - This field is inherited by subtypes together with :attr:`tp_traverse` and the - :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :attr:`tp_traverse`, and - :attr:`tp_clear` are all inherited from the base type if they are all zero in + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_traverse` and the + :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and + :c:member:`~PyTypeObject.tp_clear` are all inherited from the base type if they are all zero in the subtype *and* the subtype has the :const:`Py_TPFLAGS_HAVE_RICHCOMPARE` flag bit set. @@ -688,13 +688,13 @@ comparisons makes sense (e.g. ``==`` and ``!=``, but not ``<`` and friends), directly raise :exc:`TypeError` in the rich comparison function. - This field is inherited by subtypes together with :attr:`tp_compare` and - :attr:`tp_hash`: a subtype inherits all three of :attr:`tp_compare`, - :attr:`tp_richcompare`, and :attr:`tp_hash`, when the subtype's - :attr:`tp_compare`, :attr:`tp_richcompare`, and :attr:`tp_hash` are all *NULL*. + This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_compare` and + :c:member:`~PyTypeObject.tp_hash`: a subtype inherits all three of :c:member:`~PyTypeObject.tp_compare`, + :c:member:`~PyTypeObject.tp_richcompare`, and :c:member:`~PyTypeObject.tp_hash`, when the subtype's + :c:member:`~PyTypeObject.tp_compare`, :c:member:`~PyTypeObject.tp_richcompare`, and :c:member:`~PyTypeObject.tp_hash` are all *NULL*. The following constants are defined to be used as the third argument for - :attr:`tp_richcompare` and for :c:func:`PyObject_RichCompare`: + :c:member:`~PyTypeObject.tp_richcompare` and for :c:func:`PyObject_RichCompare`: +----------------+------------+ | Constant | Comparison | @@ -725,26 +725,26 @@ instance structure needs to include a field of type :c:type:`PyObject\*` which is initialized to *NULL*. - Do not confuse this field with :attr:`tp_weaklist`; that is the list head for + Do not confuse this field with :c:member:`~PyTypeObject.tp_weaklist`; that is the list head for weak references to the type object itself. This field is inherited by subtypes, but see the rules listed below. A subtype may override this offset; this means that the subtype uses a different weak reference list head than the base type. Since the list head is always found via - :attr:`tp_weaklistoffset`, this should not be a problem. + :c:member:`~PyTypeObject.tp_weaklistoffset`, this should not be a problem. When a type defined by a class statement has no :attr:`__slots__` declaration, and none of its base types are weakly referenceable, the type is made weakly referenceable by adding a weak reference list head slot to the instance layout - and setting the :attr:`tp_weaklistoffset` of that slot's offset. + and setting the :c:member:`~PyTypeObject.tp_weaklistoffset` of that slot's offset. When a type's :attr:`__slots__` declaration contains a slot named :attr:`__weakref__`, that slot becomes the weak reference list head for instances of the type, and the slot's offset is stored in the type's - :attr:`tp_weaklistoffset`. + :c:member:`~PyTypeObject.tp_weaklistoffset`. 
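A minimal C-side sketch of the arrangement described for ``tp_weaklistoffset`` (the ``Tracked`` names are hypothetical and not part of this patch) reserves the list head in the instance struct and records its offset::

    #include "Python.h"

    typedef struct {
        PyObject_HEAD
        PyObject *weakreflist;     /* list head used by the weakref machinery,
                                      initialized to NULL by the constructor */
    } TrackedObject;

    static void
    Tracked_dealloc(TrackedObject *self)
    {
        /* clear outstanding weak references before tearing the object down */
        if (self->weakreflist != NULL)
            PyObject_ClearWeakRefs((PyObject *)self);
        Py_TYPE(self)->tp_free((PyObject *)self);
    }

    /* in the type object:
           offsetof(TrackedObject, weakreflist),   tp_weaklistoffset
     */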
When a type's :attr:`__slots__` declaration does not contain a slot named - :attr:`__weakref__`, the type inherits its :attr:`tp_weaklistoffset` from its + :attr:`__weakref__`, the type inherits its :c:member:`~PyTypeObject.tp_weaklistoffset` from its base type. The next two fields only exist if the :const:`Py_TPFLAGS_HAVE_ITER` flag bit is @@ -772,7 +772,7 @@ are iterators (although classic instances always have this function, even if they don't define a :meth:`next` method). - Iterator types should also define the :attr:`tp_iter` function, and that + Iterator types should also define the :c:member:`~PyTypeObject.tp_iter` function, and that function should return the iterator instance itself (not a new iterator instance). @@ -780,7 +780,7 @@ This field is inherited by subtypes. -The next fields, up to and including :attr:`tp_weaklist`, only exist if the +The next fields, up to and including :c:member:`~PyTypeObject.tp_weaklist`, only exist if the :const:`Py_TPFLAGS_HAVE_CLASS` flag bit is set. @@ -790,7 +790,7 @@ structures, declaring regular methods of this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a method descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a method descriptor. This field is not inherited by subtypes (methods are inherited through a different mechanism). @@ -803,7 +803,7 @@ this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a member descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a member descriptor. This field is not inherited by subtypes (members are inherited through a different mechanism). @@ -815,7 +815,7 @@ structures, declaring computed attributes of instances of this type. For each entry in the array, an entry is added to the type's dictionary (see - :attr:`tp_dict` below) containing a getset descriptor. + :c:member:`~PyTypeObject.tp_dict` below) containing a getset descriptor. This field is not inherited by subtypes (computed attributes are inherited through a different mechanism). @@ -894,7 +894,7 @@ the instance variable dictionary; this offset is used by :c:func:`PyObject_GenericGetAttr`. - Do not confuse this field with :attr:`tp_dict`; that is the dictionary for + Do not confuse this field with :c:member:`~PyTypeObject.tp_dict`; that is the dictionary for attributes of the type object itself. If the value of this field is greater than zero, it specifies the offset from @@ -903,20 +903,20 @@ offset is more expensive to use, and should only be used when the instance structure contains a variable-length part. This is used for example to add an instance variable dictionary to subtypes of :class:`str` or :class:`tuple`. Note - that the :attr:`tp_basicsize` field should account for the dictionary added to + that the :c:member:`~PyTypeObject.tp_basicsize` field should account for the dictionary added to the end in that case, even though the dictionary is not included in the basic object layout. On a system with a pointer size of 4 bytes, - :attr:`tp_dictoffset` should be set to ``-4`` to indicate that the dictionary is + :c:member:`~PyTypeObject.tp_dictoffset` should be set to ``-4`` to indicate that the dictionary is at the very end of the structure. 
The real dictionary offset in an instance can be computed from a negative - :attr:`tp_dictoffset` as follows:: + :c:member:`~PyTypeObject.tp_dictoffset` as follows:: dictoffset = tp_basicsize + abs(ob_size)*tp_itemsize + tp_dictoffset if dictoffset is not aligned on sizeof(void*): round up to sizeof(void*) - where :attr:`tp_basicsize`, :attr:`tp_itemsize` and :attr:`tp_dictoffset` are + where :c:member:`~PyTypeObject.tp_basicsize`, :c:member:`~PyTypeObject.tp_itemsize` and :c:member:`~PyTypeObject.tp_dictoffset` are taken from the type object, and :attr:`ob_size` is taken from the instance. The absolute value is taken because long ints use the sign of :attr:`ob_size` to store the sign of the number. (There's never a need to do this calculation @@ -925,15 +925,15 @@ This field is inherited by subtypes, but see the rules listed below. A subtype may override this offset; this means that the subtype instances store the dictionary at a difference offset than the base type. Since the dictionary is - always found via :attr:`tp_dictoffset`, this should not be a problem. + always found via :c:member:`~PyTypeObject.tp_dictoffset`, this should not be a problem. When a type defined by a class statement has no :attr:`__slots__` declaration, and none of its base types has an instance variable dictionary, a dictionary - slot is added to the instance layout and the :attr:`tp_dictoffset` is set to + slot is added to the instance layout and the :c:member:`~PyTypeObject.tp_dictoffset` is set to that slot's offset. When a type defined by a class statement has a :attr:`__slots__` declaration, - the type inherits its :attr:`tp_dictoffset` from its base type. + the type inherits its :c:member:`~PyTypeObject.tp_dictoffset` from its base type. (Adding a slot named :attr:`__dict__` to the :attr:`__slots__` declaration does not have the expected effect, it just causes confusion. Maybe this should be @@ -957,15 +957,15 @@ arguments represent positional and keyword arguments of the call to :meth:`__init__`. - The :attr:`tp_init` function, if not *NULL*, is called when an instance is - created normally by calling its type, after the type's :attr:`tp_new` function - has returned an instance of the type. If the :attr:`tp_new` function returns an + The :c:member:`~PyTypeObject.tp_init` function, if not *NULL*, is called when an instance is + created normally by calling its type, after the type's :c:member:`~PyTypeObject.tp_new` function + has returned an instance of the type. If the :c:member:`~PyTypeObject.tp_new` function returns an instance of some other type that is not a subtype of the original type, no - :attr:`tp_init` function is called; if :attr:`tp_new` returns an instance of a - subtype of the original type, the subtype's :attr:`tp_init` is called. (VERSION + :c:member:`~PyTypeObject.tp_init` function is called; if :c:member:`~PyTypeObject.tp_new` returns an instance of a + subtype of the original type, the subtype's :c:member:`~PyTypeObject.tp_init` is called. (VERSION NOTE: described here is what is implemented in Python 2.2.1 and later. In - Python 2.2, the :attr:`tp_init` of the type of the object returned by - :attr:`tp_new` was always called, if not *NULL*.) + Python 2.2, the :c:member:`~PyTypeObject.tp_init` of the type of the object returned by + :c:member:`~PyTypeObject.tp_new` was always called, if not *NULL*.) This field is inherited by subtypes. @@ -982,14 +982,14 @@ initialization. 
It should return a pointer to a block of memory of adequate length for the instance, suitably aligned, and initialized to zeros, but with :attr:`ob_refcnt` set to ``1`` and :attr:`ob_type` set to the type argument. If - the type's :attr:`tp_itemsize` is non-zero, the object's :attr:`ob_size` field + the type's :c:member:`~PyTypeObject.tp_itemsize` is non-zero, the object's :attr:`ob_size` field should be initialized to *nitems* and the length of the allocated memory block should be ``tp_basicsize + nitems*tp_itemsize``, rounded up to a multiple of ``sizeof(void*)``; otherwise, *nitems* is not used and the length of the block - should be :attr:`tp_basicsize`. + should be :c:member:`~PyTypeObject.tp_basicsize`. Do not use this function to do any other instance initialization, not even to - allocate additional memory; that should be done by :attr:`tp_new`. + allocate additional memory; that should be done by :c:member:`~PyTypeObject.tp_new`. This field is inherited by static subtypes, but not by dynamic subtypes (subtypes created by a class statement); in the latter, this field is always set @@ -1011,20 +1011,20 @@ The subtype argument is the type of the object being created; the *args* and *kwds* arguments represent positional and keyword arguments of the call to the - type. Note that subtype doesn't have to equal the type whose :attr:`tp_new` + type. Note that subtype doesn't have to equal the type whose :c:member:`~PyTypeObject.tp_new` function is called; it may be a subtype of that type (but not an unrelated type). - The :attr:`tp_new` function should call ``subtype->tp_alloc(subtype, nitems)`` + The :c:member:`~PyTypeObject.tp_new` function should call ``subtype->tp_alloc(subtype, nitems)`` to allocate space for the object, and then do only as much further initialization as is absolutely necessary. Initialization that can safely be - ignored or repeated should be placed in the :attr:`tp_init` handler. A good + ignored or repeated should be placed in the :c:member:`~PyTypeObject.tp_init` handler. A good rule of thumb is that for immutable types, all initialization should take place - in :attr:`tp_new`, while for mutable types, most initialization should be - deferred to :attr:`tp_init`. + in :c:member:`~PyTypeObject.tp_new`, while for mutable types, most initialization should be + deferred to :c:member:`~PyTypeObject.tp_init`. This field is inherited by subtypes, except it is not inherited by static types - whose :attr:`tp_base` is *NULL* or ``&PyBaseObject_Type``. The latter exception + whose :c:member:`~PyTypeObject.tp_base` is *NULL* or ``&PyBaseObject_Type``. The latter exception is a precaution so that old extension types don't become callable simply by being linked with Python 2.2. @@ -1057,7 +1057,7 @@ The garbage collector needs to know whether a particular object is collectible or not. Normally, it is sufficient to look at the object's type's - :attr:`tp_flags` field, and check the :const:`Py_TPFLAGS_HAVE_GC` flag bit. But + :c:member:`~PyTypeObject.tp_flags` field, and check the :const:`Py_TPFLAGS_HAVE_GC` flag bit. But some types have a mixture of statically and dynamically allocated instances, and the statically allocated instances are not collectible. Such types should define this function; it should return ``1`` for a collectible instance, and @@ -1129,7 +1129,7 @@ .. c:member:: PyTypeObject* PyTypeObject.tp_next - Pointer to the next type object with a non-zero :attr:`tp_allocs` field. 
+ Pointer to the next type object with a non-zero :c:member:`~PyTypeObject.tp_allocs` field. Also, note that, in a garbage collected Python, tp_dealloc may be called from any Python thread, not just the thread which created the object (if the object @@ -1289,13 +1289,13 @@ This function is used by :c:func:`PySequence_Concat` and has the same signature. It is also used by the ``+`` operator, after trying the numeric - addition via the :attr:`tp_as_number.nb_add` slot. + addition via the :c:member:`~PyTypeObject.tp_as_number.nb_add` slot. .. c:member:: ssizeargfunc PySequenceMethods.sq_repeat This function is used by :c:func:`PySequence_Repeat` and has the same signature. It is also used by the ``*`` operator, after trying numeric - multiplication via the :attr:`tp_as_number.nb_mul` slot. + multiplication via the :c:member:`~PyTypeObject.tp_as_number.nb_mul` slot. .. c:member:: ssizeargfunc PySequenceMethods.sq_item @@ -1348,14 +1348,14 @@ pointer/length pair. These chunks are called :dfn:`segments` and are presumed to be non-contiguous in memory. -If an object does not export the buffer interface, then its :attr:`tp_as_buffer` +If an object does not export the buffer interface, then its :c:member:`~PyTypeObject.tp_as_buffer` member in the :c:type:`PyTypeObject` structure should be *NULL*. Otherwise, the -:attr:`tp_as_buffer` will point to a :c:type:`PyBufferProcs` structure. +:c:member:`~PyTypeObject.tp_as_buffer` will point to a :c:type:`PyBufferProcs` structure. .. note:: It is very important that your :c:type:`PyTypeObject` structure uses - :const:`Py_TPFLAGS_DEFAULT` for the value of the :attr:`tp_flags` member rather + :const:`Py_TPFLAGS_DEFAULT` for the value of the :c:member:`~PyTypeObject.tp_flags` member rather than ``0``. This tells the Python runtime that your :c:type:`PyBufferProcs` structure contains the :attr:`bf_getcharbuffer` slot. Older versions of Python did not have this member, so a new Python interpreter using an old extension @@ -1385,7 +1385,7 @@ The last slot is :attr:`bf_getcharbuffer`, of type :c:type:`getcharbufferproc`. This slot will only be present if the :const:`Py_TPFLAGS_HAVE_GETCHARBUFFER` - flag is present in the :attr:`tp_flags` field of the object's + flag is present in the :c:member:`~PyTypeObject.tp_flags` field of the object's :c:type:`PyTypeObject`. Before using this slot, the caller should test whether it is present by using the :c:func:`PyType_HasFeature` function. If the flag is present, :attr:`bf_getcharbuffer` may be *NULL*, indicating that the object's diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -150,11 +150,11 @@ .. note:: If you want your type to be subclassable from Python, and your type has the same - :attr:`tp_basicsize` as its base type, you may have problems with multiple + :c:member:`~PyTypeObject.tp_basicsize` as its base type, you may have problems with multiple inheritance. A Python subclass of your type will have to list your type first in its :attr:`__bases__`, or else it will not be able to call your type's :meth:`__new__` method without getting an error. You can avoid this problem by - ensuring that your type has a larger value for :attr:`tp_basicsize` than its + ensuring that your type has a larger value for :c:member:`~PyTypeObject.tp_basicsize` than its base type does. 
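To make the preceding note concrete, here is a hypothetical instance layout in which the derived type embeds the base type's struct and adds one member, so its tp_basicsize is strictly larger; all names are invented for illustration:

    #include <Python.h>

    /* Hypothetical layout for the note above: the derived type embeds
     * the base type's instance struct and adds a member, giving it a
     * strictly larger tp_basicsize than its base. */
    typedef struct {
        PyObject_HEAD
        double value;
    } BaseObject;          /* Base_Type.tp_basicsize == sizeof(BaseObject) */

    typedef struct {
        BaseObject base;   /* base layout must come first */
        int extra;         /* extra member => strictly larger basicsize */
    } DerivedObject;       /* Derived_Type.tp_basicsize == sizeof(DerivedObject),
                            * Derived_Type.tp_base == &Base_Type */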
Most of the time, this will be true anyway, because either your base type will be :class:`object`, or else you will be adding data members to your base type, and therefore increasing its size. @@ -174,7 +174,7 @@ All types should include this constant in their flags. It enables all of the members defined by the current version of Python. -We provide a doc string for the type in :attr:`tp_doc`. :: +We provide a doc string for the type in :c:member:`~PyTypeObject.tp_doc`. :: "Noddy objects", /* tp_doc */ @@ -183,12 +183,12 @@ the module. We'll expand this example later to have more interesting behavior. For now, all we want to be able to do is to create new :class:`Noddy` objects. -To enable object creation, we have to provide a :attr:`tp_new` implementation. +To enable object creation, we have to provide a :c:member:`~PyTypeObject.tp_new` implementation. In this case, we can just use the default implementation provided by the API function :c:func:`PyType_GenericNew`. We'd like to just assign this to the -:attr:`tp_new` slot, but we can't, for portability sake, On some platforms or +:c:member:`~PyTypeObject.tp_new` slot, but we can't, for portability sake, On some platforms or compilers, we can't statically initialize a structure member with a function -defined in another C module, so, instead, we'll assign the :attr:`tp_new` slot +defined in another C module, so, instead, we'll assign the :c:member:`~PyTypeObject.tp_new` slot in the module initialization function just before calling :c:func:`PyType_Ready`:: @@ -283,13 +283,13 @@ self->ob_type->tp_free((PyObject*)self); } -which is assigned to the :attr:`tp_dealloc` member:: +which is assigned to the :c:member:`~PyTypeObject.tp_dealloc` member:: (destructor)Noddy_dealloc, /*tp_dealloc*/ This method decrements the reference counts of the two Python attributes. We use :c:func:`Py_XDECREF` here because the :attr:`first` and :attr:`last` members -could be *NULL*. It then calls the :attr:`tp_free` member of the object's type +could be *NULL*. It then calls the :c:member:`~PyTypeObject.tp_free` member of the object's type to free the object's memory. Note that the object's type might not be :class:`NoddyType`, because the object may be an instance of a subclass. @@ -323,7 +323,7 @@ return (PyObject *)self; } -and install it in the :attr:`tp_new` member:: +and install it in the :c:member:`~PyTypeObject.tp_new` member:: Noddy_new, /* tp_new */ @@ -344,16 +344,16 @@ often ignore the arguments, leaving the argument handling to initializer methods. Note that if the type supports subclassing, the type passed may not be the type being defined. The new method calls the tp_alloc slot to allocate -memory. We don't fill the :attr:`tp_alloc` slot ourselves. Rather +memory. We don't fill the :c:member:`~PyTypeObject.tp_alloc` slot ourselves. Rather :c:func:`PyType_Ready` fills it for us by inheriting it from our base class, which is :class:`object` by default. Most types use the default allocation. .. note:: - If you are creating a co-operative :attr:`tp_new` (one that calls a base type's - :attr:`tp_new` or :meth:`__new__`), you must *not* try to determine what method + If you are creating a co-operative :c:member:`~PyTypeObject.tp_new` (one that calls a base type's + :c:member:`~PyTypeObject.tp_new` or :meth:`__new__`), you must *not* try to determine what method to call using method resolution order at runtime. 
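The pattern described a little earlier in this hunk (filling tp_new with PyType_GenericNew in the module initialization function, right before PyType_Ready()) looks roughly like this when written out as a complete minimal module. The Noddy naming follows the tutorial; the surrounding boilerplate is assumed, not the tutorial's exact code:

    #include <Python.h>

    typedef struct {
        PyObject_HEAD
        /* no members yet; attributes come later in the tutorial */
    } NoddyObject;

    static PyTypeObject NoddyType = {
        PyVarObject_HEAD_INIT(NULL, 0)
        .tp_name = "noddy.Noddy",
        .tp_basicsize = sizeof(NoddyObject),
        .tp_itemsize = 0,
        .tp_flags = Py_TPFLAGS_DEFAULT,
        .tp_doc = "Noddy objects",
    };

    static struct PyModuleDef noddymodule = {
        PyModuleDef_HEAD_INIT,
        "noddy",                        /* m_name */
        "Example extension type.",      /* m_doc */
        -1,                             /* m_size */
        NULL                            /* m_methods */
    };

    PyMODINIT_FUNC
    PyInit_noddy(void)
    {
        PyObject *m;

        /* Some compilers cannot statically initialise a slot with a
         * function defined in another C module, so do it here. */
        NoddyType.tp_new = PyType_GenericNew;
        if (PyType_Ready(&NoddyType) < 0)
            return NULL;

        m = PyModule_Create(&noddymodule);
        if (m == NULL)
            return NULL;

        Py_INCREF(&NoddyType);
        PyModule_AddObject(m, "Noddy", (PyObject *)&NoddyType);
        return m;
    }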
Always statically determine - what type you are going to call, and call its :attr:`tp_new` directly, or via + what type you are going to call, and call its :c:member:`~PyTypeObject.tp_new` directly, or via ``type->tp_base->tp_new``. If you do not do this, Python subclasses of your type that also inherit from other Python-defined classes may not work correctly. (Specifically, you may not be able to create instances of such subclasses @@ -390,11 +390,11 @@ return 0; } -by filling the :attr:`tp_init` slot. :: +by filling the :c:member:`~PyTypeObject.tp_init` slot. :: (initproc)Noddy_init, /* tp_init */ -The :attr:`tp_init` slot is exposed in Python as the :meth:`__init__` method. It +The :c:member:`~PyTypeObject.tp_init` slot is exposed in Python as the :meth:`__init__` method. It is used to initialize an object after it's created. Unlike the new method, we can't guarantee that the initializer is called. The initializer isn't called when unpickling objects and it can be overridden. Our initializer accepts @@ -424,7 +424,7 @@ * when we know that deallocation of the object [#]_ will not cause any calls back into our type's code -* when decrementing a reference count in a :attr:`tp_dealloc` handler when +* when decrementing a reference count in a :c:member:`~PyTypeObject.tp_dealloc` handler when garbage-collections is not supported [#]_ We want to expose our instance variables as attributes. There are a @@ -440,7 +440,7 @@ {NULL} /* Sentinel */ }; -and put the definitions in the :attr:`tp_members` slot:: +and put the definitions in the :c:member:`~PyTypeObject.tp_members` slot:: Noddy_members, /* tp_members */ @@ -516,7 +516,7 @@ {NULL} /* Sentinel */ }; -and assign them to the :attr:`tp_methods` slot:: +and assign them to the :c:member:`~PyTypeObject.tp_methods` slot:: Noddy_methods, /* tp_methods */ @@ -611,7 +611,7 @@ {NULL} /* Sentinel */ }; -and register it in the :attr:`tp_getset` slot:: +and register it in the :c:member:`~PyTypeObject.tp_getset` slot:: Noddy_getseters, /* tp_getset */ @@ -628,7 +628,7 @@ {NULL} /* Sentinel */ }; -We also need to update the :attr:`tp_init` handler to only allow strings [#]_ to +We also need to update the :c:member:`~PyTypeObject.tp_init` handler to only allow strings [#]_ to be passed:: static int @@ -747,7 +747,7 @@ .. note:: - Note that the :attr:`tp_traverse` implementation must name its arguments exactly + Note that the :c:member:`~PyTypeObject.tp_traverse` implementation must name its arguments exactly *visit* and *arg* in order to use :c:func:`Py_VISIT`. This is to encourage uniformity across these boring implementations. @@ -784,7 +784,7 @@ reference count drops to zero, we might cause code to run that calls back into the object. In addition, because we now support garbage collection, we also have to worry about code being run that triggers garbage collection. If garbage -collection is run, our :attr:`tp_traverse` handler could get called. We can't +collection is run, our :c:member:`~PyTypeObject.tp_traverse` handler could get called. We can't take a chance of having :c:func:`Noddy_traverse` called when a member's reference count has dropped to zero and its value hasn't been set to *NULL*. @@ -804,8 +804,8 @@ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /*tp_flags*/ -That's pretty much it. If we had written custom :attr:`tp_alloc` or -:attr:`tp_free` slots, we'd need to modify them for cyclic-garbage collection. +That's pretty much it. 
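The tp_traverse requirement discussed in this hunk can be illustrated with a small sketch based on the tutorial's Noddy layout (first/last members). Py_VISIT() requires the arguments to be named exactly `visit` and `arg`; the matching tp_clear handler is shown as well, since Py_CLEAR() handles the NULL-member case safely:

    #include <Python.h>

    typedef struct {
        PyObject_HEAD
        PyObject *first;    /* first name, may be NULL */
        PyObject *last;     /* last name, may be NULL */
        int number;
    } Noddy;

    static int
    Noddy_traverse(Noddy *self, visitproc visit, void *arg)
    {
        Py_VISIT(self->first);
        Py_VISIT(self->last);
        return 0;
    }

    static int
    Noddy_clear(Noddy *self)
    {
        Py_CLEAR(self->first);
        Py_CLEAR(self->last);
        return 0;
    }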
If we had written custom :c:member:`~PyTypeObject.tp_alloc` or +:c:member:`~PyTypeObject.tp_free` slots, we'd need to modify them for cyclic-garbage collection. Most extensions will use the versions automatically provided. @@ -864,8 +864,8 @@ This pattern is important when writing a type with custom :attr:`new` and :attr:`dealloc` methods. The :attr:`new` method should not actually create the -memory for the object with :attr:`tp_alloc`, that will be handled by the base -class when calling its :attr:`tp_new`. +memory for the object with :c:member:`~PyTypeObject.tp_alloc`, that will be handled by the base +class when calling its :c:member:`~PyTypeObject.tp_new`. When filling out the :c:func:`PyTypeObject` for the :class:`Shoddy` type, you see a slot for :c:func:`tp_base`. Due to cross platform compiler issues, you can't @@ -890,8 +890,8 @@ } Before calling :c:func:`PyType_Ready`, the type structure must have the -:attr:`tp_base` slot filled in. When we are deriving a new type, it is not -necessary to fill out the :attr:`tp_alloc` slot with :c:func:`PyType_GenericNew` +:c:member:`~PyTypeObject.tp_base` slot filled in. When we are deriving a new type, it is not +necessary to fill out the :c:member:`~PyTypeObject.tp_alloc` slot with :c:func:`PyType_GenericNew` -- the allocate function from the base type will be inherited. After that, calling :c:func:`PyType_Ready` and adding the type object to the @@ -934,7 +934,7 @@ These fields tell the runtime how much memory to allocate when new objects of this type are created. Python has some built-in support for variable length -structures (think: strings, lists) which is where the :attr:`tp_itemsize` field +structures (think: strings, lists) which is where the :c:member:`~PyTypeObject.tp_itemsize` field comes in. This will be dealt with later. :: char *tp_doc; @@ -1032,13 +1032,13 @@ expensive. These handlers are all optional, and most types at most need to implement the -:attr:`tp_str` and :attr:`tp_repr` handlers. :: +:c:member:`~PyTypeObject.tp_str` and :c:member:`~PyTypeObject.tp_repr` handlers. :: reprfunc tp_repr; reprfunc tp_str; printfunc tp_print; -The :attr:`tp_repr` handler should return a string object containing a +The :c:member:`~PyTypeObject.tp_repr` handler should return a string object containing a representation of the instance for which it is called. Here is a simple example:: @@ -1049,15 +1049,15 @@ obj->obj_UnderlyingDatatypePtr->size); } -If no :attr:`tp_repr` handler is specified, the interpreter will supply a -representation that uses the type's :attr:`tp_name` and a uniquely-identifying +If no :c:member:`~PyTypeObject.tp_repr` handler is specified, the interpreter will supply a +representation that uses the type's :c:member:`~PyTypeObject.tp_name` and a uniquely-identifying value for the object. -The :attr:`tp_str` handler is to :func:`str` what the :attr:`tp_repr` handler +The :c:member:`~PyTypeObject.tp_str` handler is to :func:`str` what the :c:member:`~PyTypeObject.tp_repr` handler described above is to :func:`repr`; that is, it is called when Python code calls :func:`str` on an instance of your object. Its implementation is very similar -to the :attr:`tp_repr` function, but the resulting string is intended for human -consumption. If :attr:`tp_str` is not specified, the :attr:`tp_repr` handler is +to the :c:member:`~PyTypeObject.tp_repr` function, but the resulting string is intended for human +consumption. If :c:member:`~PyTypeObject.tp_str` is not specified, the :c:member:`~PyTypeObject.tp_repr` handler is used instead. 
Here is a simple example:: @@ -1152,7 +1152,7 @@ type object. Each descriptor controls access to one attribute of the instance object. Each of the tables is optional; if all three are *NULL*, instances of the type will only have attributes that are inherited from their base type, and -should leave the :attr:`tp_getattro` and :attr:`tp_setattro` fields *NULL* as +should leave the :c:member:`~PyTypeObject.tp_getattro` and :c:member:`~PyTypeObject.tp_setattro` fields *NULL* as well, allowing the base type to handle attributes. The tables are declared as three fields of the type object:: @@ -1161,7 +1161,7 @@ struct PyMemberDef *tp_members; struct PyGetSetDef *tp_getset; -If :attr:`tp_methods` is not *NULL*, it must refer to an array of +If :c:member:`~PyTypeObject.tp_methods` is not *NULL*, it must refer to an array of :c:type:`PyMethodDef` structures. Each entry in the table is an instance of this structure:: @@ -1225,13 +1225,13 @@ single: WRITE_RESTRICTED single: RESTRICTED -An interesting advantage of using the :attr:`tp_members` table to build +An interesting advantage of using the :c:member:`~PyTypeObject.tp_members` table to build descriptors that are used at runtime is that any attribute defined this way can have an associated doc string simply by providing the text in the table. An application can use the introspection API to retrieve the descriptor from the class object, and get the doc string using its :attr:`__doc__` attribute. -As with the :attr:`tp_methods` table, a sentinel entry with a :attr:`name` value +As with the :c:member:`~PyTypeObject.tp_methods` table, a sentinel entry with a :attr:`name` value of *NULL* is required. .. XXX Descriptors need to be explained in more detail somewhere, but not here. @@ -1257,7 +1257,7 @@ called, so that if you do need to extend their functionality, you'll understand what needs to be done. -The :attr:`tp_getattr` handler is called when the object requires an attribute +The :c:member:`~PyTypeObject.tp_getattr` handler is called when the object requires an attribute look-up. It is called in the same situations where the :meth:`__getattr__` method of a class would be called. @@ -1265,7 +1265,7 @@ :c:func:`newdatatype_getSize` and :c:func:`newdatatype_setSize` in the example below), (2) provide a method table listing these functions, and (3) provide a getattr function that returns the result of a lookup in that table. The method -table uses the same structure as the :attr:`tp_methods` field of the type +table uses the same structure as the :c:member:`~PyTypeObject.tp_methods` field of the type object. Here is an example:: @@ -1284,11 +1284,11 @@ return Py_FindMethod(newdatatype_methods, (PyObject *)obj, name); } -The :attr:`tp_setattr` handler is called when the :meth:`__setattr__` or +The :c:member:`~PyTypeObject.tp_setattr` handler is called when the :meth:`__setattr__` or :meth:`__delattr__` method of a class instance would be called. When an attribute should be deleted, the third parameter will be *NULL*. Here is an example that simply raises an exception; if this were really all you wanted, the -:attr:`tp_setattr` handler should be set to *NULL*. :: +:c:member:`~PyTypeObject.tp_setattr` handler should be set to *NULL*. 
:: static int newdatatype_setattr(newdatatypeobject *obj, char *name, PyObject *v) @@ -1305,7 +1305,7 @@ cmpfunc tp_compare; -The :attr:`tp_compare` handler is called when comparisons are needed and the +The :c:member:`~PyTypeObject.tp_compare` handler is called when comparisons are needed and the object does not implement the specific rich comparison method which matches the requested comparison. (It is always used if defined and the :c:func:`PyObject_Compare` or :c:func:`PyObject_Cmp` functions are used, or if @@ -1316,7 +1316,7 @@ greater than, respectively; as of Python 2.2, this is no longer allowed. In the future, other return values may be assigned a different meaning.) -A :attr:`tp_compare` handler may raise an exception. In this case it should +A :c:member:`~PyTypeObject.tp_compare` handler may raise an exception. In this case it should return a negative value. The caller has to test for the exception using :c:func:`PyErr_Occurred`. @@ -1391,7 +1391,7 @@ This function is called when an instance of your data type is "called", for example, if ``obj1`` is an instance of your data type and the Python script -contains ``obj1('hello')``, the :attr:`tp_call` handler is invoked. +contains ``obj1('hello')``, the :c:member:`~PyTypeObject.tp_call` handler is invoked. This function takes three arguments: @@ -1480,7 +1480,7 @@ For an object to be weakly referencable, the extension must include a :c:type:`PyObject\*` field in the instance structure for the use of the weak reference mechanism; it must be initialized to *NULL* by the object's -constructor. It must also set the :attr:`tp_weaklistoffset` field of the +constructor. It must also set the :c:member:`~PyTypeObject.tp_weaklistoffset` field of the corresponding type object to the offset of the field. For example, the instance type is defined with the following structure:: @@ -1566,7 +1566,7 @@ .. [#] This is true when we know that the object is a basic type, like a string or a float. -.. [#] We relied on this in the :attr:`tp_dealloc` handler in this example, because our +.. [#] We relied on this in the :c:member:`~PyTypeObject.tp_dealloc` handler in this example, because our type doesn't support garbage collection. Even if a type supports garbage collection, there are calls that can be made to "untrack" the object from garbage collection, however, these calls are advanced and not covered here. diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -132,8 +132,8 @@ Return a list of objects directly referred to by any of the arguments. The referents returned are those objects visited by the arguments' C-level - :attr:`tp_traverse` methods (if any), and may not be all objects actually - directly reachable. :attr:`tp_traverse` methods are supported only by objects + :c:member:`~PyTypeObject.tp_traverse` methods (if any), and may not be all objects actually + directly reachable. :c:member:`~PyTypeObject.tp_traverse` methods are supported only by objects that support garbage collection, and are only required to visit objects that may be involved in a cycle. So, for example, if an integer is directly reachable from an argument, that integer object may or may not appear in the result list. diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -614,7 +614,7 @@ iterators for those iteration types. 
(An example of an object supporting multiple forms of iteration would be a tree structure which supports both breadth-first and depth-first traversal.) This method corresponds to the - :attr:`tp_iter` slot of the type structure for Python objects in the Python/C + :c:member:`~PyTypeObject.tp_iter` slot of the type structure for Python objects in the Python/C API. The iterator objects themselves are required to support the following two @@ -625,7 +625,7 @@ Return the iterator object itself. This is required to allow both containers and iterators to be used with the :keyword:`for` and :keyword:`in` statements. - This method corresponds to the :attr:`tp_iter` slot of the type structure for + This method corresponds to the :c:member:`~PyTypeObject.tp_iter` slot of the type structure for Python objects in the Python/C API. @@ -633,7 +633,7 @@ Return the next item from the container. If there are no further items, raise the :exc:`StopIteration` exception. This method corresponds to the - :attr:`tp_iternext` slot of the type structure for Python objects in the + :c:member:`~PyTypeObject.tp_iternext` slot of the type structure for Python objects in the Python/C API. Python defines several iterator objects to support iteration over general and diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -450,9 +450,9 @@ Python classes can define an :meth:`__iter__` method, which should create and return a new iterator for the object; if the object is its own iterator, this method can just return ``self``. In particular, iterators will usually be their -own iterators. Extension types implemented in C can implement a :attr:`tp_iter` +own iterators. Extension types implemented in C can implement a :c:member:`~PyTypeObject.tp_iter` function in order to return an iterator, and extension types that want to behave -as iterators can define a :attr:`tp_iternext` function. +as iterators can define a :c:member:`~PyTypeObject.tp_iternext` function. So, after all this, what do iterators actually do? They have one required method, :meth:`next`, which takes no arguments and returns the next value. When @@ -478,7 +478,7 @@ In 2.2, Python's :keyword:`for` statement no longer expects a sequence; it expects something for which :func:`iter` will return an iterator. For backward compatibility and convenience, an iterator is automatically constructed for -sequences that don't implement :meth:`__iter__` or a :attr:`tp_iter` slot, so +sequences that don't implement :meth:`__iter__` or a :c:member:`~PyTypeObject.tp_iter` slot, so ``for i in [1,2,3]`` will still work. Wherever the Python interpreter loops over a sequence, it's been changed to use the iterator protocol. This means you can do things like this:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 22:07:15 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 22:07:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2310241=3A_Clear_ex?= =?utf-8?q?tension_module_dict_copies_at_interpreter_shutdown=2E?= Message-ID: <3c5jGC6yC7zP6M@mail.python.org> http://hg.python.org/cpython/rev/62658d9d8926 changeset: 84958:62658d9d8926 parent: 84956:36ff479e429c user: Antoine Pitrou date: Thu Aug 01 22:07:06 2013 +0200 summary: Issue #10241: Clear extension module dict copies at interpreter shutdown. Patch by Neil Schemenauer, minimally modified. 
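Returning to the tp_iter/tp_iternext hunks above: a minimal C iterator type wires the two slots roughly as follows. The counting type is invented for illustration; exhaustion is signalled by returning NULL from tp_iternext without setting an exception, which the interpreter reports as StopIteration:

    #include <Python.h>

    typedef struct {
        PyObject_HEAD
        long current;
        long stop;
    } CountIterObject;

    static PyObject *
    countiter_iter(PyObject *self)
    {
        Py_INCREF(self);            /* iterators are their own iterators */
        return self;
    }

    static PyObject *
    countiter_iternext(CountIterObject *self)
    {
        if (self->current >= self->stop)
            return NULL;            /* end of iteration */
        return PyLong_FromLong(self->current++);
    }

    static PyTypeObject CountIter_Type = {
        PyVarObject_HEAD_INIT(NULL, 0)
        .tp_name = "example.countiter",
        .tp_basicsize = sizeof(CountIterObject),
        .tp_flags = Py_TPFLAGS_DEFAULT,
        .tp_iter = countiter_iter,
        .tp_iternext = (iternextfunc)countiter_iternext,
    };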
files: Include/pystate.h | 3 +++ Misc/NEWS | 3 +++ Python/import.c | 2 ++ Python/pystate.c | 25 +++++++++++++++++++++++++ 4 files changed, 33 insertions(+), 0 deletions(-) diff --git a/Include/pystate.h b/Include/pystate.h --- a/Include/pystate.h +++ b/Include/pystate.h @@ -134,6 +134,9 @@ PyAPI_FUNC(int) PyState_RemoveModule(struct PyModuleDef*); #endif PyAPI_FUNC(PyObject*) PyState_FindModule(struct PyModuleDef*); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyState_ClearModules(void); +#endif PyAPI_FUNC(PyThreadState *) PyThreadState_New(PyInterpreterState *); PyAPI_FUNC(PyThreadState *) _PyThreadState_Prealloc(PyInterpreterState *); diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #10241: Clear extension module dict copies at interpreter shutdown. + Patch by Neil Schemenauer, minimally modified. + - Issue #9035: ismount now recognises volumes mounted below a drive root on Windows. Original patch by Atsuo Ishimoto. diff --git a/Python/import.c b/Python/import.c --- a/Python/import.c +++ b/Python/import.c @@ -380,6 +380,8 @@ builtins = interp->builtins; interp->builtins = PyDict_New(); Py_DECREF(builtins); + /* Clear module dict copies stored in the interpreter state */ + _PyState_ClearModules(); /* Collect references */ _PyGC_CollectNoFail(); /* Dump GC stats before it's too late, since it uses the warnings diff --git a/Python/pystate.c b/Python/pystate.c --- a/Python/pystate.c +++ b/Python/pystate.c @@ -320,6 +320,31 @@ return PyList_SetItem(state->modules_by_index, index, Py_None); } +/* used by import.c:PyImport_Cleanup */ +void +_PyState_ClearModules(void) +{ + PyInterpreterState *state = PyThreadState_GET()->interp; + if (state->modules_by_index) { + Py_ssize_t i; + for (i = 0; i < PyList_GET_SIZE(state->modules_by_index); i++) { + PyObject *m = PyList_GET_ITEM(state->modules_by_index, i); + if (PyModule_Check(m)) { + /* cleanup the saved copy of module dicts */ + PyModuleDef *md = PyModule_GetDef(m); + if (md) + Py_CLEAR(md->m_base.m_copy); + } + } + /* Setting modules_by_index to NULL could be dangerous, so we + clear the list instead. */ + if (PyList_SetSlice(state->modules_by_index, + 0, PyList_GET_SIZE(state->modules_by_index), + NULL)) + PyErr_WriteUnraisable(state->modules_by_index); + } +} + void PyThreadState_Clear(PyThreadState *tstate) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 22:25:43 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 1 Aug 2013 22:25:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Fix_tkinter_re?= =?utf-8?q?gression_introduced_by_the_security_fix_in_=2316248=2E?= Message-ID: <3c5jgW0qn0z7LkV@mail.python.org> http://hg.python.org/cpython/rev/0f17aed78168 changeset: 84959:0f17aed78168 branch: 2.7 parent: 84957:bb546f6d8ab4 user: Antoine Pitrou date: Thu Aug 01 22:25:12 2013 +0200 summary: Fix tkinter regression introduced by the security fix in #16248. files: Lib/lib-tk/Tkinter.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/lib-tk/Tkinter.py b/Lib/lib-tk/Tkinter.py --- a/Lib/lib-tk/Tkinter.py +++ b/Lib/lib-tk/Tkinter.py @@ -1736,7 +1736,7 @@ # ensure that self.tk is always _something_. 
self.tk = None if baseName is None: - import sys, os + import os baseName = os.path.basename(sys.argv[0]) baseName, ext = os.path.splitext(baseName) if ext not in ('.py', '.pyc', '.pyo'): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 1 23:32:21 2013 From: python-checkins at python.org (terry.reedy) Date: Thu, 1 Aug 2013 23:32:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_8_spelling_corrections?= Message-ID: <3c5l8P2LBcz7Ljy@mail.python.org> http://hg.python.org/peps/rev/d3efcc9e612e changeset: 5015:d3efcc9e612e user: Terry Jan Reedy date: Thu Aug 01 17:32:07 2013 -0400 summary: PEP 8 spelling corrections files: pep-0008.txt | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -300,7 +300,7 @@ However, explicit relative imports are an acceptable alternative to absolute imports, especially when dealing with complex package layouts - where using absolute imports would be unecessarily verbose:: + where using absolute imports would be unnecessarily verbose:: from . import sibling from .sibling import example @@ -394,7 +394,7 @@ - If operators with different priorities are used, consider adding whitespace around the operators with the lowest priority(ies). Use - your own judgement; however, never use more than one space, and + your own judgment; however, never use more than one space, and always have the same amount of whitespace on both sides of a binary operator. @@ -914,7 +914,7 @@ Class naming conventions apply here, although you should add the suffix "Error" to your exception classes if the exception is an error. Non-error exceptions that are used for non-local flow control - or other forms of signalling need no special suffix. + or other forms of signaling need no special suffix. - Use exception chaining appropriately. In Python 3, "raise X from Y" should be used to indicate explicit replacement without losing the -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 00:03:12 2013 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 2 Aug 2013 00:03:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogQWRkIDIuNy41IHV1?= =?utf-8?q?id?= Message-ID: <3c5lr02DS1z7LjP@mail.python.org> http://hg.python.org/cpython/rev/7d18e3a00df6 changeset: 84960:7d18e3a00df6 branch: 2.7 user: Martin v. L?wis date: Thu Aug 01 23:55:26 2013 +0200 summary: Add 2.7.5 uuid files: Tools/msi/uuids.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Tools/msi/uuids.py b/Tools/msi/uuids.py --- a/Tools/msi/uuids.py +++ b/Tools/msi/uuids.py @@ -60,4 +60,5 @@ '2.7.3150':'{C0C31BCC-56FB-42a7-8766-D29E1BD74C7C}', # 2.7.3 '2.7.4121':'{47F45F45-72D7-4e54-AF41-26767EDE95CF}', # 2.7.4rc1 '2.7.4150':'{84ADC96C-B7E0-4938-9D6E-2B640D5DA224}', # 2.7.4 + '2.7.5150':'{DBDD570E-0952-475f-9453-AB88F3DD5659}', # 2.7.5 } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 00:03:13 2013 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 2 Aug 2013 00:03:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogQWRkIDIuNy42IHV1?= =?utf-8?q?ids=2E?= Message-ID: <3c5lr13wm2z7LjV@mail.python.org> http://hg.python.org/cpython/rev/3bd55ec317a7 changeset: 84961:3bd55ec317a7 branch: 2.7 user: Martin v. L?wis date: Thu Aug 01 23:57:21 2013 +0200 summary: Add 2.7.6 uuids. 
files: Tools/msi/uuids.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Tools/msi/uuids.py b/Tools/msi/uuids.py --- a/Tools/msi/uuids.py +++ b/Tools/msi/uuids.py @@ -61,4 +61,6 @@ '2.7.4121':'{47F45F45-72D7-4e54-AF41-26767EDE95CF}', # 2.7.4rc1 '2.7.4150':'{84ADC96C-B7E0-4938-9D6E-2B640D5DA224}', # 2.7.4 '2.7.5150':'{DBDD570E-0952-475f-9453-AB88F3DD5659}', # 2.7.5 + '2.7.6121':'{D1EBC07F-A7B1-4163-83DB-AE813CEF392F}', # 2.7.6rc1 + '2.7.6150':'{C3CC4DF5-39A5-4027-B136-2B3E1F5AB6E2}', # 2.7.6 } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 00:09:15 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 2 Aug 2013 00:09:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fix_the_RLIMIT=5FCORE_reso?= =?utf-8?q?urce_lowering_logic_in_test=5Fsubprocess=2E?= Message-ID: <3c5lyz117yz7LjV@mail.python.org> http://hg.python.org/cpython/rev/dae93ae6b713 changeset: 84962:dae93ae6b713 parent: 84958:62658d9d8926 user: Antoine Pitrou date: Fri Aug 02 00:08:51 2013 +0200 summary: Fix the RLIMIT_CORE resource lowering logic in test_subprocess. We must only lower the soft limit, since lowering the hard limit makes it impossible to raise it back at the end. (this could prevent core dumps from being generated when the test suite crashes) files: Lib/test/test_subprocess.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -1123,7 +1123,7 @@ if resource is not None: try: self.old_limit = resource.getrlimit(resource.RLIMIT_CORE) - resource.setrlimit(resource.RLIMIT_CORE, (0, 0)) + resource.setrlimit(resource.RLIMIT_CORE, (0, self.old_limit[1])) except (ValueError, resource.error): pass -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 00:41:06 2013 From: python-checkins at python.org (ned.deily) Date: Fri, 2 Aug 2013 00:41:06 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fix_OS_X_installer_build_s?= =?utf-8?q?cript_to_handle_the_pretty-printed_=5Fsysconfigdata=2Epy?= Message-ID: <3c5mgk1wQqz7LjV@mail.python.org> http://hg.python.org/cpython/rev/3ffd9a270e14 changeset: 84963:3ffd9a270e14 user: Ned Deily date: Thu Aug 01 15:39:47 2013 -0700 summary: Fix OS X installer build script to handle the pretty-printed _sysconfigdata.py when removing references to temporary build files. This is not an elegant solution but it does isolate changes to just build-installer.py. files: Mac/BuildScript/build-installer.py | 56 +++++++++++++++-- 1 files changed, 48 insertions(+), 8 deletions(-) diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -982,22 +982,62 @@ # the end-users system. Also remove the directories from _sysconfigdata.py # (added in 3.3) if it exists. 
+ include_path = '-I%s/libraries/usr/local/include' % (WORKDIR,) + lib_path = '-L%s/libraries/usr/local/lib' % (WORKDIR,) + path_to_lib = os.path.join(rootDir, 'Library', 'Frameworks', 'Python.framework', 'Versions', version, 'lib', 'python%s'%(version,)) - paths = [os.path.join(path_to_lib, 'config' + config_suffix, 'Makefile'), - os.path.join(path_to_lib, '_sysconfigdata.py')] - for path in paths: - if not os.path.exists(path): - continue + + # fix Makefile + path = os.path.join(path_to_lib, 'config' + config_suffix, 'Makefile') + fp = open(path, 'r') + data = fp.read() + fp.close() + + for p in (include_path, lib_path): + data = data.replace(" " + p, '') + data = data.replace(p + " ", '') + + fp = open(path, 'w') + fp.write(data) + fp.close() + + # fix _sysconfigdata if it exists + # + # TODO: make this more robust! test_sysconfig_module of + # distutils.tests.test_sysconfig.SysconfigTestCase tests that + # the output from get_config_var in both sysconfig and + # distutils.sysconfig is exactly the same for both CFLAGS and + # LDFLAGS. The fixing up is now complicated by the pretty + # printing in _sysconfigdata.py. Also, we are using the + # pprint from the Python running the installer build which + # may not cosmetically format the same as the pprint in the Python + # being built (and which is used to originally generate + # _sysconfigdata.py). + + import pprint + path = os.path.join(path_to_lib, '_sysconfigdata.py') + if os.path.exists(path): fp = open(path, 'r') data = fp.read() fp.close() + # create build_time_vars dict + exec(data) + vars = {} + for k, v in build_time_vars.items(): + if type(v) == type(''): + for p in (include_path, lib_path): + v = v.replace(' ' + p, '') + v = v.replace(p + ' ', '') + vars[k] = v - data = data.replace(' -L%s/libraries/usr/local/lib'%(WORKDIR,), '') - data = data.replace(' -I%s/libraries/usr/local/include'%(WORKDIR,), '') fp = open(path, 'w') - fp.write(data) + # duplicated from sysconfig._generate_posix_vars() + fp.write('# system configuration generated and used by' + ' the sysconfig module\n') + fp.write('build_time_vars = ') + pprint.pprint(vars, stream=fp) fp.close() # Add symlinks in /usr/local/bin, using relative links -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 00:49:45 2013 From: python-checkins at python.org (victor.stinner) Date: Fri, 2 Aug 2013 00:49:45 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318609=2C_=2318408?= =?utf-8?q?=3A_=5Fctypes=5Fadd=5Ftraceback=28=29_now_clears_the_current_ex?= =?utf-8?q?ception?= Message-ID: <3c5msj24spz7Ljc@mail.python.org> http://hg.python.org/cpython/rev/de1ea7f84e08 changeset: 84964:de1ea7f84e08 user: Victor Stinner date: Fri Aug 02 00:47:47 2013 +0200 summary: Issue #18609, #18408: _ctypes_add_traceback() now clears the current exception while adding the traceback, because it may call indirectly a Python function and Python functions must not be called with an exception set. In the case of the issue #18609, _ctypes_add_traceback() called the iso8859-1 decoder which is implemented in Python. Python has a ISO-8859-1 codec implemented in C. It is not used because PyUnicode_Decode() only uses the C codec for other names (aliases) of this codec ("latin-1", "latin1" and "iso-8859-1"). 
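The commit message above follows a general rule: clear (or stash) the error indicator before running anything that may execute Python code, then restore it afterwards. A minimal sketch of that save/clear/restore idiom, with an invented placeholder for the intervening work, might look like this:

    #include <Python.h>

    /* do_python_work() is a placeholder, not a real CPython API; decoding
     * is used here only because it may end up in a codec written in Python. */
    static void
    do_python_work(void)
    {
        PyObject *s = PyUnicode_DecodeLatin1("abc", 3, "strict");
        Py_XDECREF(s);
    }

    static void
    call_with_saved_exception(void)
    {
        PyObject *exc_type, *exc_value, *exc_tb;

        /* PyErr_Fetch() clears the error indicator... */
        PyErr_Fetch(&exc_type, &exc_value, &exc_tb);

        do_python_work();               /* safe: no exception is set now */

        /* ...and PyErr_Restore() re-arms it (stealing the references). */
        PyErr_Restore(exc_type, exc_value, exc_tb);
    }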
files: Modules/_ctypes/callbacks.c | 23 ++++++++++++++++++++--- 1 files changed, 20 insertions(+), 3 deletions(-) diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -98,20 +98,37 @@ PyObject *py_globals = 0; PyCodeObject *py_code = 0; PyFrameObject *py_frame = 0; + PyObject *exception, *value, *tb; + + /* (Save and) Clear the current exception. Python functions must not be + called with an exception set. Calling Python functions happens when + the codec of the filesystem encoding is implemented in pure Python. */ + PyErr_Fetch(&exception, &value, &tb); py_globals = PyDict_New(); - if (!py_globals) goto bad; + if (!py_globals) + goto bad; py_code = PyCode_NewEmpty(filename, funcname, lineno); - if (!py_code) goto bad; + if (!py_code) + goto bad; py_frame = PyFrame_New( PyThreadState_Get(), /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ py_globals, /*PyObject *globals,*/ 0 /*PyObject *locals*/ ); - if (!py_frame) goto bad; + if (!py_frame) + goto bad; py_frame->f_lineno = lineno; + + PyErr_Restore(exception, value, tb); PyTraceBack_Here(py_frame); + + Py_DECREF(py_globals); + Py_DECREF(py_code); + Py_DECREF(py_frame); + return; + bad: Py_XDECREF(py_globals); Py_XDECREF(py_code); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 01:48:33 2013 From: python-checkins at python.org (victor.stinner) Date: Fri, 2 Aug 2013 01:48:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318519=3A_Fix_test?= =?utf-8?q?=5Fsqlite_on_old_versions_of_libsqlite3?= Message-ID: <3c5p9Y4Kgnz7Ljd@mail.python.org> http://hg.python.org/cpython/rev/c73f4dced6aa changeset: 84965:c73f4dced6aa user: Victor Stinner date: Fri Aug 02 01:48:10 2013 +0200 summary: Issue #18519: Fix test_sqlite on old versions of libsqlite3 With old SQLite versions, _sqlite3_result_error() sets a new Python exception, so don't restore the previous exception. 
files: Modules/_sqlite/connection.c | 15 ++++++++++++--- 1 files changed, 12 insertions(+), 3 deletions(-) diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -698,6 +698,7 @@ _Py_IDENTIFIER(finalize); int ok; PyObject *exception, *value, *tb; + int restore; #ifdef WITH_THREAD PyGILState_STATE threadstate; @@ -715,6 +716,7 @@ /* Keep the exception (if any) of the last call to step() */ PyErr_Fetch(&exception, &value, &tb); + restore = 1; function_result = _PyObject_CallMethodId(*aggregate_instance, &PyId_finalize, ""); @@ -732,11 +734,18 @@ PyErr_Clear(); } _sqlite3_result_error(context, "user-defined aggregate's 'finalize' method raised error", -1); +#if SQLITE_VERSION_NUMBER < 3003003 + /* with old SQLite versions, _sqlite3_result_error() sets a new Python + exception, so don't restore the previous exception */ + restore = 0; +#endif } - /* Restore the exception (if any) of the last call to step(), - but clear also the current exception if finalize() failed */ - PyErr_Restore(exception, value, tb); + if (restore) { + /* Restore the exception (if any) of the last call to step(), + but clear also the current exception if finalize() failed */ + PyErr_Restore(exception, value, tb); + } error: #ifdef WITH_THREAD -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 03:21:51 2013 From: python-checkins at python.org (larry.hastings) Date: Fri, 2 Aug 2013 03:21:51 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2317899=3A_Fix_rare?= =?utf-8?q?_file_descriptor_leak_in_os=2Elistdir=28=29=2E?= Message-ID: <3c5rFC0vCBz7Lkx@mail.python.org> http://hg.python.org/cpython/rev/e51cbc45f4ca changeset: 84966:e51cbc45f4ca user: Larry Hastings date: Thu Aug 01 18:18:56 2013 -0700 summary: Issue #17899: Fix rare file descriptor leak in os.listdir(). files: Misc/NEWS | 2 ++ Modules/posixmodule.c | 14 ++++++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #17899: Fix rare file descriptor leak in os.listdir(). + - Issue #10241: Clear extension module dict copies at interpreter shutdown. Patch by Neil Schemenauer, minimally modified. 
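Before the patch below, the descriptor handed to fdopendir() could be leaked when opening the directory failed. A plain-C distillation of the idiom the fix adopts (close the fd by hand if no DIR* ever took ownership of it); the helper is hypothetical, not the posixmodule code:

    #include <dirent.h>
    #include <stdio.h>
    #include <unistd.h>

    static DIR *
    open_dir_from_fd(int fd)
    {
        int dup_fd = dup(fd);        /* the DIR* needs its own descriptor */
        if (dup_fd == -1) {
            perror("dup");
            return NULL;
        }

        DIR *dirp = fdopendir(dup_fd);
        if (dirp == NULL) {
            close(dup_fd);           /* without this, dup_fd would leak */
            perror("fdopendir");
            return NULL;
        }
        return dirp;                 /* closedir(dirp) also closes dup_fd */
    }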
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -3420,12 +3420,13 @@ static PyObject * _posix_listdir(path_t *path, PyObject *list) { - int fd = -1; - PyObject *v; DIR *dirp = NULL; struct dirent *ep; int return_str; /* if false, return bytes */ +#ifdef HAVE_FDOPENDIR + int fd = -1; +#endif errno = 0; #ifdef HAVE_FDOPENDIR @@ -3467,6 +3468,13 @@ if (dirp == NULL) { list = path_error(path); +#ifdef HAVE_FDOPENDIR + if (fd != -1) { + Py_BEGIN_ALLOW_THREADS + close(fd); + Py_END_ALLOW_THREADS + } +#endif goto exit; } if ((list = PyList_New(0)) == NULL) { @@ -3509,8 +3517,10 @@ exit: if (dirp != NULL) { Py_BEGIN_ALLOW_THREADS +#ifdef HAVE_FDOPENDIR if (fd > -1) rewinddir(dirp); +#endif closedir(dirp); Py_END_ALLOW_THREADS } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 04:26:41 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 04:26:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Additional_PEP_8_tweaks?= Message-ID: <3c5sh155Jpz7Llf@mail.python.org> http://hg.python.org/peps/rev/59e060c2ffe7 changeset: 5016:59e060c2ffe7 user: Nick Coghlan date: Fri Aug 02 12:26:24 2013 +1000 summary: Additional PEP 8 tweaks - be explicit that project style guides take precedence - make a line splitting example consistent with other guidelines - clarify wording of the def-vs-lambda guideline files: pep-0008.txt | 12 ++++++++---- 1 files changed, 8 insertions(+), 4 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -28,6 +28,10 @@ identified and past conventions are rendered obsolete by changes in the language itself. +Many projects incorporate this guide by reference into their own style +guides. In the event of any conflicts, the project-specific style guide +takes precedence. + A Foolish Consistency is the Hobgoblin of Little Minds ====================================================== @@ -201,8 +205,8 @@ def __init__(self, width, height, color='black', emphasis=None, highlight=0): if (width == 0 and height == 0 and - color == 'red' and emphasis == 'strong' or - highlight > 100): + color == 'red' and emphasis == 'strong' or + highlight > 100): raise ValueError("sorry, you lose") if width == 0 and height == 0 and (color == 'red' or emphasis is None): @@ -882,8 +886,8 @@ operator. However, it is best to implement all six operations so that confusion doesn't arise in other contexts. -- Always use a def statement instead of assigning a lambda expression - to a name. +- Always use a def statement instead of an assignment statement that binds + a lambda expression directly to a name. Yes:: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 04:30:45 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 04:30:45 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Always_defer_to_project_speci?= =?utf-8?q?fic_guides?= Message-ID: <3c5smj6MMVz7Ljf@mail.python.org> http://hg.python.org/peps/rev/65d15d968219 changeset: 5017:65d15d968219 user: Nick Coghlan date: Fri Aug 02 12:30:37 2013 +1000 summary: Always defer to project specific guides files: pep-0008.txt | 5 ++--- 1 files changed, 2 insertions(+), 3 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -28,9 +28,8 @@ identified and past conventions are rendered obsolete by changes in the language itself. 
-Many projects incorporate this guide by reference into their own style -guides. In the event of any conflicts, the project-specific style guide -takes precedence. +Many projects have their own coding style guidelines. In the event of any +conflicts, such project-specific guides take precedence for that project. A Foolish Consistency is the Hobgoblin of Little Minds -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 04:39:14 2013 From: python-checkins at python.org (larry.hastings) Date: Fri, 2 Aug 2013 04:39:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3ODk5?= =?utf-8?q?=3A_Fix_rare_file_descriptor_leak_in_os=2Elistdir=28=29=2E?= Message-ID: <3c5syV5GPDz7Ljf@mail.python.org> http://hg.python.org/cpython/rev/bf68711bc939 changeset: 84967:bf68711bc939 branch: 3.3 parent: 84955:a381721299a3 user: Larry Hastings date: Thu Aug 01 19:34:46 2013 -0700 summary: Issue #17899: Fix rare file descriptor leak in os.listdir(). (Done as separate patch from trunk as the code has diverged quite a bit.) files: Misc/NEWS | 2 ++ Modules/posixmodule.c | 11 +++++++++++ 2 files changed, 13 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -12,6 +12,8 @@ Core and Builtins ----------------- +- Issue #17899: Fix rare file descriptor leak in os.listdir(). + - Issue #18552: Check return value of PyArena_AddPyObject() in obj2ast_object(). diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -3443,7 +3443,9 @@ path_t path; PyObject *list = NULL; static char *keywords[] = {"path", NULL}; +#ifdef HAVE_FDOPENDIR int fd = -1; +#endif /* HAVE_FDOPENDIR */ #if defined(MS_WINDOWS) && !defined(HAVE_OPENDIR) PyObject *v; @@ -3732,6 +3734,13 @@ if (dirp == NULL) { list = path_error("listdir", &path); +#ifdef HAVE_FDOPENDIR + if (fd != -1) { + Py_BEGIN_ALLOW_THREADS + close(fd); + Py_END_ALLOW_THREADS + } +#endif /* HAVE_FDOPENDIR */ goto exit; } if ((list = PyList_New(0)) == NULL) { @@ -3774,8 +3783,10 @@ exit: if (dirp != NULL) { Py_BEGIN_ALLOW_THREADS +#ifdef HAVE_FDOPENDIR if (fd > -1) rewinddir(dirp); +#endif /* HAVE_FDOPENDIR */ closedir(dirp); Py_END_ALLOW_THREADS } -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Fri Aug 2 05:48:24 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 02 Aug 2013 05:48:24 +0200 Subject: [Python-checkins] Daily reference leaks (e51cbc45f4ca): sum=0 Message-ID: results for e51cbc45f4ca on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogqHM7qL', '-x'] From python-checkins at python.org Fri Aug 2 06:38:10 2013 From: python-checkins at python.org (ned.deily) Date: Fri, 2 Aug 2013 06:38:10 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE3NTU3?= =?utf-8?q?=3A_Fix_os=2Egetgroups=28=29_to_work_with_the_modified_behavior?= =?utf-8?q?_of?= Message-ID: <3c5wbk1Nblz7LjT@mail.python.org> http://hg.python.org/cpython/rev/6d3b7e0559a0 changeset: 84968:6d3b7e0559a0 branch: 2.7 parent: 84961:3bd55ec317a7 user: Ned Deily date: Thu Aug 01 21:19:09 2013 -0700 summary: Issue #17557: Fix os.getgroups() to work with the modified behavior of getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. 
files: Misc/ACKS | 1 + Misc/NEWS | 3 +++ Modules/posixmodule.c | 30 ++++++++++++++++++++++++++++++ 3 files changed, 34 insertions(+), 0 deletions(-) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -599,6 +599,7 @@ Luke Kenneth Casson Leighton Tshepang Lekhonkhobe Marc-Andr? Lemburg +Mateusz Lenik John Lenton Kostyantyn Leschenko Christopher Tur Lesniewski-Laas diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,6 +26,9 @@ Library ------- +- Issue #17557: Fix os.getgroups() to work with the modified behavior of + getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. + - Issue #18455: multiprocessing should not retry connect() with same socket. - Issue #18513: Fix behaviour of cmath.rect w.r.t. signed zeros on OS X 10.8 + diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4054,6 +4054,34 @@ gid_t* alt_grouplist = grouplist; int n; +#ifdef __APPLE__ + /* Issue #17557: As of OS X 10.8, getgroups(2) no longer raises EINVAL if + * there are more groups than can fit in grouplist. Therefore, on OS X + * always first call getgroups with length 0 to get the actual number + * of groups. + */ + n = getgroups(0, NULL); + if (n < 0) { + return posix_error(); + } else if (n <= MAX_GROUPS) { + /* groups will fit in existing array */ + alt_grouplist = grouplist; + } else { + alt_grouplist = PyMem_Malloc(n * sizeof(gid_t)); + if (alt_grouplist == NULL) { + errno = EINVAL; + return posix_error(); + } + } + + n = getgroups(n, alt_grouplist); + if (n == -1) { + if (alt_grouplist != grouplist) { + PyMem_Free(alt_grouplist); + } + return posix_error(); + } +#else n = getgroups(MAX_GROUPS, grouplist); if (n < 0) { if (errno == EINVAL) { @@ -4080,6 +4108,8 @@ return posix_error(); } } +#endif + result = PyList_New(n); if (result != NULL) { int i; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 06:38:11 2013 From: python-checkins at python.org (ned.deily) Date: Fri, 2 Aug 2013 06:38:11 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3NTU3?= =?utf-8?q?=3A_Fix_os=2Egetgroups=28=29_to_work_with_the_modified_behavior?= =?utf-8?q?_of?= Message-ID: <3c5wbl3R20z7Ljd@mail.python.org> http://hg.python.org/cpython/rev/0a4afa8833b5 changeset: 84969:0a4afa8833b5 branch: 3.3 parent: 84967:bf68711bc939 user: Ned Deily date: Thu Aug 01 21:21:15 2013 -0700 summary: Issue #17557: Fix os.getgroups() to work with the modified behavior of getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. files: Misc/ACKS | 1 + Misc/NEWS | 3 +++ Modules/posixmodule.c | 30 ++++++++++++++++++++++++++++++ 3 files changed, 34 insertions(+), 0 deletions(-) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -721,6 +721,7 @@ Luke Kenneth Casson Leighton Tshepang Lekhonkhobe Marc-Andr? Lemburg +Mateusz Lenik John Lenton Kostyantyn Leschenko Christopher Tur Lesniewski-Laas diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -59,6 +59,9 @@ Library ------- +- Issue #17557: Fix os.getgroups() to work with the modified behavior of + getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. + - Issue #18599: Fix name attribute of _sha1.sha1() object. It now returns 'SHA1' instead of 'SHA'. 
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -6331,6 +6331,34 @@ gid_t* alt_grouplist = grouplist; int n; +#ifdef __APPLE__ + /* Issue #17557: As of OS X 10.8, getgroups(2) no longer raises EINVAL if + * there are more groups than can fit in grouplist. Therefore, on OS X + * always first call getgroups with length 0 to get the actual number + * of groups. + */ + n = getgroups(0, NULL); + if (n < 0) { + return posix_error(); + } else if (n <= MAX_GROUPS) { + /* groups will fit in existing array */ + alt_grouplist = grouplist; + } else { + alt_grouplist = PyMem_Malloc(n * sizeof(gid_t)); + if (alt_grouplist == NULL) { + errno = EINVAL; + return posix_error(); + } + } + + n = getgroups(n, alt_grouplist); + if (n == -1) { + if (alt_grouplist != grouplist) { + PyMem_Free(alt_grouplist); + } + return posix_error(); + } +#else n = getgroups(MAX_GROUPS, grouplist); if (n < 0) { if (errno == EINVAL) { @@ -6357,6 +6385,8 @@ return posix_error(); } } +#endif + result = PyList_New(n); if (result != NULL) { int i; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 06:38:12 2013 From: python-checkins at python.org (ned.deily) Date: Fri, 2 Aug 2013 06:38:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2317557=3A_merge_from_3=2E3?= Message-ID: <3c5wbm5g0jz7Ll8@mail.python.org> http://hg.python.org/cpython/rev/634a8e8816d4 changeset: 84970:634a8e8816d4 parent: 84966:e51cbc45f4ca parent: 84969:0a4afa8833b5 user: Ned Deily date: Thu Aug 01 21:37:17 2013 -0700 summary: Issue #17557: merge from 3.3 files: Misc/ACKS | 1 + Misc/NEWS | 3 +++ Modules/posixmodule.c | 30 ++++++++++++++++++++++++++++++ 3 files changed, 34 insertions(+), 0 deletions(-) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -737,6 +737,7 @@ Luke Kenneth Casson Leighton Tshepang Lekhonkhobe Marc-Andr? Lemburg +Mateusz Lenik John Lenton Kostyantyn Leschenko Benno Leslie diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -184,6 +184,9 @@ Library ------- +- Issue #17557: Fix os.getgroups() to work with the modified behavior of + getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. + - Issue #18608: Avoid keeping a strong reference to the locale module inside the _io module. diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -5911,6 +5911,34 @@ gid_t* alt_grouplist = grouplist; int n; +#ifdef __APPLE__ + /* Issue #17557: As of OS X 10.8, getgroups(2) no longer raises EINVAL if + * there are more groups than can fit in grouplist. Therefore, on OS X + * always first call getgroups with length 0 to get the actual number + * of groups. 
+ */ + n = getgroups(0, NULL); + if (n < 0) { + return posix_error(); + } else if (n <= MAX_GROUPS) { + /* groups will fit in existing array */ + alt_grouplist = grouplist; + } else { + alt_grouplist = PyMem_Malloc(n * sizeof(gid_t)); + if (alt_grouplist == NULL) { + errno = EINVAL; + return posix_error(); + } + } + + n = getgroups(n, alt_grouplist); + if (n == -1) { + if (alt_grouplist != grouplist) { + PyMem_Free(alt_grouplist); + } + return posix_error(); + } +#else n = getgroups(MAX_GROUPS, grouplist); if (n < 0) { if (errno == EINVAL) { @@ -5937,6 +5965,8 @@ return posix_error(); } } +#endif + result = PyList_New(n); if (result != NULL) { int i; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 07:13:55 2013 From: python-checkins at python.org (ned.deily) Date: Fri, 2 Aug 2013 07:13:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NjI3?= =?utf-8?q?=3A_Fix_typo_noticed_by_Vajrasky_Kok=2E?= Message-ID: <3c5xNz2LYsz7LjT@mail.python.org> http://hg.python.org/cpython/rev/3716d64f846b changeset: 84971:3716d64f846b branch: 3.3 parent: 84969:0a4afa8833b5 user: Ned Deily date: Thu Aug 01 22:12:29 2013 -0700 summary: Issue #18627: Fix typo noticed by Vajrasky Kok. files: Modules/hashlib.h | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/hashlib.h b/Modules/hashlib.h --- a/Modules/hashlib.h +++ b/Modules/hashlib.h @@ -2,7 +2,7 @@ /* * Given a PyObject* obj, fill in the Py_buffer* viewp with the result - * of PyObject_GetBuffer. Sets and exception and issues a return NULL + * of PyObject_GetBuffer. Sets an exception and issues a return NULL * on any errors. */ #define GET_BUFFER_VIEW_OR_ERROUT(obj, viewp) do { \ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 07:13:56 2013 From: python-checkins at python.org (ned.deily) Date: Fri, 2 Aug 2013 07:13:56 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Closes_=2318627=3A_merge_from_3=2E3?= Message-ID: <3c5xP04M6pz7LlF@mail.python.org> http://hg.python.org/cpython/rev/454e9db1c750 changeset: 84972:454e9db1c750 parent: 84970:634a8e8816d4 parent: 84971:3716d64f846b user: Ned Deily date: Thu Aug 01 22:13:30 2013 -0700 summary: Closes #18627: merge from 3.3 files: Modules/hashlib.h | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/hashlib.h b/Modules/hashlib.h --- a/Modules/hashlib.h +++ b/Modules/hashlib.h @@ -2,7 +2,7 @@ /* * Given a PyObject* obj, fill in the Py_buffer* viewp with the result - * of PyObject_GetBuffer. Sets and exception and issues a return NULL + * of PyObject_GetBuffer. Sets an exception and issues a return NULL * on any errors. */ #define GET_BUFFER_VIEW_OR_ERROUT(obj, viewp) do { \ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 08:54:04 2013 From: python-checkins at python.org (ned.deily) Date: Fri, 2 Aug 2013 08:54:04 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NTE3?= =?utf-8?q?=3A_Move_definition_of_=22xxlimited=22_extension_to_detect=5Fmo?= =?utf-8?b?ZHVsZXMoKS4=?= Message-ID: <3c5zcX0w84z7LjV@mail.python.org> http://hg.python.org/cpython/rev/e5607874e8ff changeset: 84973:e5607874e8ff branch: 3.3 parent: 84971:3716d64f846b user: Ned Deily date: Thu Aug 01 23:51:27 2013 -0700 summary: Issue #18517: Move definition of "xxlimited" extension to detect_modules(). 
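For readers unfamiliar with xxlimited: it is a demonstration extension built against the stable ABI, which is why setup.py defines Py_LIMITED_API for it. A toy module in the same spirit, deliberately much smaller than the real Modules/xxlimited.c and using invented names, might look like:

    #define Py_LIMITED_API 1     /* or via define_macros, as in the setup.py hunk below */
    #include <Python.h>

    static PyObject *
    hello(PyObject *self, PyObject *args)
    {
        return PyUnicode_FromString("hello from a Py_LIMITED_API module");
    }

    static PyMethodDef xxlite_methods[] = {
        {"hello", hello, METH_NOARGS, "Return a greeting string."},
        {NULL, NULL, 0, NULL}
    };

    static struct PyModuleDef xxlitemodule = {
        PyModuleDef_HEAD_INIT,
        "xxlite",
        "Toy module restricted to the stable ABI.",
        0,
        xxlite_methods
    };

    PyMODINIT_FUNC
    PyInit_xxlite(void)
    {
        return PyModule_Create(&xxlitemodule);
    }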
files: setup.py | 17 +++++++++-------- 1 files changed, 9 insertions(+), 8 deletions(-) diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1525,6 +1525,15 @@ if '_tkinter' not in [e.name for e in self.extensions]: missing.append('_tkinter') +## # Uncomment these lines if you want to play with xxmodule.c +## ext = Extension('xx', ['xxmodule.c']) +## self.extensions.append(ext) + + if 'd' not in sys.abiflags: + ext = Extension('xxlimited', ['xxlimited.c'], + define_macros=[('Py_LIMITED_API', 1)]) + self.extensions.append(ext) + return missing def detect_tkinter_darwin(self, inc_dirs, lib_dirs): @@ -1722,14 +1731,6 @@ ) self.extensions.append(ext) -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - if 'd' not in sys.abiflags: - ext = Extension('xxlimited', ['xxlimited.c'], - define_macros=[('Py_LIMITED_API', 1)]) - self.extensions.append(ext) - # XXX handle these, but how to detect? # *** Uncomment and edit for PIL (TkImaging) extension only: # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 08:54:05 2013 From: python-checkins at python.org (ned.deily) Date: Fri, 2 Aug 2013 08:54:05 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318517=3A_merge_from_3=2E3?= Message-ID: <3c5zcY2wxBz7Ljg@mail.python.org> http://hg.python.org/cpython/rev/1d832bc857e2 changeset: 84974:1d832bc857e2 parent: 84972:454e9db1c750 parent: 84973:e5607874e8ff user: Ned Deily date: Thu Aug 01 23:53:24 2013 -0700 summary: Issue #18517: merge from 3.3 files: setup.py | 17 +++++++++-------- 1 files changed, 9 insertions(+), 8 deletions(-) diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1534,6 +1534,15 @@ if '_tkinter' not in [e.name for e in self.extensions]: missing.append('_tkinter') +## # Uncomment these lines if you want to play with xxmodule.c +## ext = Extension('xx', ['xxmodule.c']) +## self.extensions.append(ext) + + if 'd' not in sys.abiflags: + ext = Extension('xxlimited', ['xxlimited.c'], + define_macros=[('Py_LIMITED_API', 1)]) + self.extensions.append(ext) + return missing def detect_tkinter_darwin(self, inc_dirs, lib_dirs): @@ -1731,14 +1740,6 @@ ) self.extensions.append(ext) -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - if 'd' not in sys.abiflags: - ext = Extension('xxlimited', ['xxlimited.c'], - define_macros=[('Py_LIMITED_API', 1)]) - self.extensions.append(ext) - # XXX handle these, but how to detect? 
# *** Uncomment and edit for PIL (TkImaging) extension only: # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 10:02:28 2013 From: python-checkins at python.org (charles-francois.natali) Date: Fri, 2 Aug 2013 10:02:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4MzI1?= =?utf-8?q?=3A_Fix_a_test=5Fkqueue_failure_on_OpenBSD=3A_kevent=27s_data_a?= =?utf-8?q?nd_event?= Message-ID: <3c617S1pCDz7Lm1@mail.python.org> http://hg.python.org/cpython/rev/8205e72b5cfc changeset: 84975:8205e72b5cfc branch: 2.7 parent: 84968:6d3b7e0559a0 user: Charles-Fran?ois Natali date: Fri Aug 02 10:01:46 2013 +0200 summary: Issue #18325: Fix a test_kqueue failure on OpenBSD: kevent's data and event members are integers. files: Lib/test/test_kqueue.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_kqueue.py b/Lib/test/test_kqueue.py --- a/Lib/test/test_kqueue.py +++ b/Lib/test/test_kqueue.py @@ -70,13 +70,13 @@ self.assertEqual(ev, ev) self.assertNotEqual(ev, other) - bignum = sys.maxsize * 2 + 1 - ev = select.kevent(bignum, 1, 2, 3, sys.maxsize, bignum) + bignum = 0x7fff + ev = select.kevent(bignum, 1, 2, 3, bignum - 1, bignum) self.assertEqual(ev.ident, bignum) self.assertEqual(ev.filter, 1) self.assertEqual(ev.flags, 2) self.assertEqual(ev.fflags, 3) - self.assertEqual(ev.data, sys.maxsize) + self.assertEqual(ev.data, bignum - 1) self.assertEqual(ev.udata, bignum) self.assertEqual(ev, ev) self.assertNotEqual(ev, other) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 10:22:56 2013 From: python-checkins at python.org (charles-francois.natali) Date: Fri, 2 Aug 2013 10:22:56 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4MzI1?= =?utf-8?q?=3A_Fix_a_test=5Fkqueue_failure_on_OpenBSD=3A_kevent=27s_data_a?= =?utf-8?q?nd_event?= Message-ID: <3c61b42yzzz7LkQ@mail.python.org> http://hg.python.org/cpython/rev/78db41e4c6a9 changeset: 84976:78db41e4c6a9 branch: 3.3 parent: 84973:e5607874e8ff user: Charles-Fran?ois Natali date: Fri Aug 02 10:21:20 2013 +0200 summary: Issue #18325: Fix a test_kqueue failure on OpenBSD: kevent's data and event members are integers. 
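The constraint behind this change can be illustrated with a short, self-contained snippet: as the commit message notes, the kevent fields are plain C integers on OpenBSD, so the huge ``sys.maxsize``-derived values used previously do not fit, while a modest constant round-trips unchanged. A minimal sketch mirroring the updated test (it only runs on platforms that provide ``select.kevent``, such as the BSDs and OS X)::

    import select

    # A value that fits the underlying C integer fields is stored and
    # reported back unchanged by the kevent object.
    bignum = 0x7fff
    ev = select.kevent(bignum, 1, 2, 3, bignum - 1, bignum)
    assert ev.ident == bignum
    assert ev.filter == 1 and ev.flags == 2 and ev.fflags == 3
    assert ev.data == bignum - 1
    assert ev.udata == bignum
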
files: Lib/test/test_kqueue.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_kqueue.py b/Lib/test/test_kqueue.py --- a/Lib/test/test_kqueue.py +++ b/Lib/test/test_kqueue.py @@ -75,13 +75,13 @@ self.assertEqual(ev, ev) self.assertNotEqual(ev, other) - bignum = sys.maxsize * 2 + 1 - ev = select.kevent(bignum, 1, 2, 3, sys.maxsize, bignum) + bignum = 0x7fff + ev = select.kevent(bignum, 1, 2, 3, bignum - 1, bignum) self.assertEqual(ev.ident, bignum) self.assertEqual(ev.filter, 1) self.assertEqual(ev.flags, 2) self.assertEqual(ev.fflags, 3) - self.assertEqual(ev.data, sys.maxsize) + self.assertEqual(ev.data, bignum - 1) self.assertEqual(ev.udata, bignum) self.assertEqual(ev, ev) self.assertNotEqual(ev, other) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 10:22:57 2013 From: python-checkins at python.org (charles-francois.natali) Date: Fri, 2 Aug 2013 10:22:57 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318325=3A_Fix_a_test=5Fkqueue_failure_on_OpenBSD?= =?utf-8?q?=3A_kevent=27s_data_and_event?= Message-ID: <3c61b54sF5z7Lm2@mail.python.org> http://hg.python.org/cpython/rev/1287d4c9cd39 changeset: 84977:1287d4c9cd39 parent: 84974:1d832bc857e2 parent: 84976:78db41e4c6a9 user: Charles-Fran?ois Natali date: Fri Aug 02 10:22:07 2013 +0200 summary: Issue #18325: Fix a test_kqueue failure on OpenBSD: kevent's data and event members are integers. files: Lib/test/test_kqueue.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_kqueue.py b/Lib/test/test_kqueue.py --- a/Lib/test/test_kqueue.py +++ b/Lib/test/test_kqueue.py @@ -75,13 +75,13 @@ self.assertEqual(ev, ev) self.assertNotEqual(ev, other) - bignum = sys.maxsize * 2 + 1 - ev = select.kevent(bignum, 1, 2, 3, sys.maxsize, bignum) + bignum = 0x7fff + ev = select.kevent(bignum, 1, 2, 3, bignum - 1, bignum) self.assertEqual(ev.ident, bignum) self.assertEqual(ev.filter, 1) self.assertEqual(ev.flags, 2) self.assertEqual(ev.fflags, 3) - self.assertEqual(ev.data, sys.maxsize) + self.assertEqual(ev.data, bignum - 1) self.assertEqual(ev.udata, bignum) self.assertEqual(ev, ev) self.assertNotEqual(ev, other) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 11:13:41 2013 From: python-checkins at python.org (christian.heimes) Date: Fri, 2 Aug 2013 11:13:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbjogRml4IHdhcm5pbmc6IOKAmHB0?= =?utf-8?q?r2=E2=80=99_may_be_used_uninitialized_in_this_function?= Message-ID: <3c62jd29hrz7Ljk@mail.python.org> http://hg.python.org/cpython/rev/52e166a975f9 changeset: 84978:52e166a975f9 user: Christian Heimes date: Fri Aug 02 11:10:51 2013 +0200 summary: Fix warning: ?ptr2? 
may be used uninitialized in this function files: Modules/_testcapimodule.c | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -2723,6 +2723,7 @@ case PYMEM_DOMAIN_RAW: ptr2 = PyMem_RawRealloc(ptr, size2); break; case PYMEM_DOMAIN_MEM: ptr2 = PyMem_Realloc(ptr, size2); break; case PYMEM_DOMAIN_OBJ: ptr2 = PyObject_Realloc(ptr, size2); break; + default: ptr2 = NULL; break; } if (ptr2 == NULL) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 2 12:19:05 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 12:19:05 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Use_Guido=27s_preferred_wordi?= =?utf-8?q?ng_re=3A_line_length?= Message-ID: <3c64955v4yz7Llj@mail.python.org> http://hg.python.org/peps/rev/82e24ac40255 changeset: 5018:82e24ac40255 user: Nick Coghlan date: Fri Aug 02 20:18:45 2013 +1000 summary: Use Guido's preferred wording re: line length files: pep-0008.txt | 19 ++++++++++++++----- 1 files changed, 14 insertions(+), 5 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -162,12 +162,11 @@ Maximum Line Length ------------------- -Aim to limit all lines to a maximum of 79 characters, but up to 99 -characters is acceptable when it improves readability. +Limit all lines to a maximum of 79 characters. For flowing long blocks of text with fewer structural restrictions -(docstrings or comments), limiting the line length to 72 characters -is recommended. +(docstrings or comments), the line length should be limited to 72 +characters. Limiting the required editor window width makes it possible to have several files open side-by-side, and works well when using code @@ -175,10 +174,20 @@ The default wrapping in most tools disrupts the visual structure of the code, making it more difficult to understand. The limits are chosen to -avoid wrapping in editors with the window width set to 80 (or 100), even +avoid wrapping in editors with the window width set to 80, even if the tool places a marker glyph in the final column when wrapping lines. Some web based tools may not offer dynamic line wrapping at all. +Some teams strongly prefer a longer line length. For code maintained +exclusively or primarily by a team that can reach agreement on this +issue, it is okay to increase the line nominal line length from 80 to +100 characters (effectively increasing the maximum length to 99 +characters), provided that comments and docstrings are still wrapped +at 72 characters. + +The Python standard library is conservative and requires limiting +lines to 79 characters (and docstrings/comments to 72). + The preferred way of wrapping long lines is by using Python's implied line continuation inside parentheses, brackets and braces. 
Long lines can be broken over multiple lines by wrapping expressions in -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 12:20:27 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 12:20:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Fix_typo?= Message-ID: <3c64Bg592gzN9g@mail.python.org> http://hg.python.org/peps/rev/3b883d6a412e changeset: 5019:3b883d6a412e user: Nick Coghlan date: Fri Aug 02 20:20:17 2013 +1000 summary: Fix typo files: pep-0008.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -180,7 +180,7 @@ Some teams strongly prefer a longer line length. For code maintained exclusively or primarily by a team that can reach agreement on this -issue, it is okay to increase the line nominal line length from 80 to +issue, it is okay to increase the nominal line length from 80 to 100 characters (effectively increasing the maximum length to 99 characters), provided that comments and docstrings are still wrapped at 72 characters. -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 12:28:53 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 12:28:53 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Tweak_source_encoding_wording?= Message-ID: <3c64NP5zP2z7LjV@mail.python.org> http://hg.python.org/peps/rev/472583f06d4c changeset: 5020:472583f06d4c user: Nick Coghlan date: Fri Aug 02 20:28:35 2013 +1000 summary: Tweak source encoding wording files: pep-0008.txt | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -244,14 +244,15 @@ Note, some editors and web-based code viewers may not recognize control-L as a form feed and will show another glyph in its place. -Encodings (PEP 263) -------------------- + +Source File Encoding +-------------------- Code in the core Python distribution should always use UTF-8 (or ASCII in Python 2). -Files using ASCII (in Python 2) or UTF-8 (in Python 3) should not have a -coding cookie. +Files using ASCII (in Python 2) or UTF-8 (in Python 3) should not have +an encoding declaration. In the standard library, non-default encodings should be used only for test purposes or when a comment or docstring needs to mention an author -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 12:46:33 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 12:46:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Another_PEP_8_typo_fix?= Message-ID: <3c64mn2Q1Bz7Lk3@mail.python.org> http://hg.python.org/peps/rev/086ea6886f57 changeset: 5021:086ea6886f57 user: Nick Coghlan date: Fri Aug 02 20:46:19 2013 +1000 summary: Another PEP 8 typo fix Yay for not saving before committing :P files: pep-0008.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -256,7 +256,7 @@ In the standard library, non-default encodings should be used only for test purposes or when a comment or docstring needs to mention an author -name that that contains non-ASCII characters; otherwise, using ``\x``, +name that contains non-ASCII characters; otherwise, using ``\x``, ``\u``, ``\U``, or ``\N`` escapes is the preferred way to include non-ASCII data in string literals. 
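The escape-based style recommended in this hunk can be shown with a purely illustrative literal (Python 3 shown; the string value itself is invented for the example): each escape form spells the same accented character while keeping the source file ASCII-only::

    # \x, \u and \N escapes keep non-ASCII data out of the source text itself.
    by_hex = "Fran\xe7ois"
    by_unicode_escape = "Fran\u00e7ois"
    by_character_name = "Fran\N{LATIN SMALL LETTER C WITH CEDILLA}ois"
    assert by_hex == by_unicode_escape == by_character_name
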
-- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 15:06:19 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 15:06:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_pep-0432=2Etxt?= Message-ID: <3c67t30zykz7Lmj@mail.python.org> http://hg.python.org/peps/rev/54c62ebb0501 changeset: 5022:54c62ebb0501 user: Nick Coghlan date: Fri Aug 02 23:05:54 2013 +1000 summary: pep-0432.txt files: pep-0426.txt | 131 +++++++++++++++++++++++++++++++++++--- pep-0432.txt | 29 ++++++++- 2 files changed, 147 insertions(+), 13 deletions(-) diff --git a/pep-0426.txt b/pep-0426.txt --- a/pep-0426.txt +++ b/pep-0426.txt @@ -86,6 +86,24 @@ "rationale" section at the end of the document, as it would otherwise be an irrelevant distraction for future readers. + +A Note on Time Frames +===================== + +There's a lot of work going on in the Python packaging space at the moment. +In the near term (up until the release of Python 3.4), those efforts will be +focused on the existing metadata standards, both those defined in Python +Enhancement Proposals, and the de facto standards defined by the setuptools +project. + +This PEP is about setting out a longer term goal for the ecosystem that +captures those existing capabilities in a format that is easier to work +with. There are still a number of key open questions (mostly related to +source based distribution), and those won't be able to receive proper +attention from the development community until the other near term +concerns have been resolved. + + Purpose ======= @@ -223,12 +241,16 @@ along with the supporting metadata file formats defined by the ``setuptools`` project. -"Entry points" are a scheme for identifying Python callables or other -objects as strings consisting of a Python module name and a module -attribute name, separated by a colon. For example: ``"test.regrtest:main"``. - -"Distros" is used as the preferred term for Linux distributions, to help -avoid confusion with the Python-specific meaning of the term. +"Distro" is used as the preferred term for Linux distributions, to help +avoid confusion with the Python-specific meaning of the term "distribution". + +"Dist" is the preferred abbreviation for "distributions" in the sense defined +in this PEP. + +"Qualified name" comes from PEP 3155, and refers to the name of an +object relative to its containing module. This is useful for referring +to method definitions on classes, as well as any other attributes of +top level module objects. Integration and deployment of distributions @@ -255,7 +277,10 @@ These three steps may all occur directly on the target system. Alternatively the build step may be separated out by using binary archives provided by the publisher of the distribution, or by creating the binary archives on a -separate system prior to deployment. +separate system prior to deployment. The advantage of the latter approach +is that it minimizes the dependencies that need to be installed on +deployment targets (as the build dependencies will be needed only on the +build systems). The published metadata for distributions SHOULD allow integrators, with the aid of build and integration tools, to: @@ -299,6 +324,25 @@ Standard build system --------------------- +.. 
note:: + + The standard build system currently described in the PEP is a draft based + on existing practices for projects using distutils or setuptools as their + build system (or other projects, like ``d2to1``, that expose a setup.py + file for backwards compatibility with existing tools) + + The specification doesn't currently cover expected argument support for + the commands, which is a limitation that needs to be addressed before the + PEP can be considered ready for acceptance. + + It is also possible that the "meta build system" will be separated out + into a distinct PEP in the coming months (similar to the separation of + the versioning and requirement specification standard out to PEP 440). + + If a `suitable API can be worked out `__, then it may + even be possible to switch to a more declarative API for build system + specification. + Both development and integration of distributions relies on the ability to build extension modules and perform other operations in a distribution independent manner. @@ -318,10 +362,6 @@ * ``python setup.py bdist_wheel``: create a binary archive from an sdist, source archive or VCS checkout -Future iterations of the metadata and associated PEPs may aim to replace -these ``distutils``/``setuptools`` dependent commands with build system -independent entry points. - Metadata format =============== @@ -436,6 +476,48 @@ be ``pydist-dependencies.json``. +Export metadata +--------------- + +Distributions may define components that are intended for use by other +distributions (such as plugins). As it can be beneficial to know whether or +not a distribution defines any such exports without needing to parse any +metadata, a suitable subset is defined for serialisation to a separate file +in the ``dist-info`` metadata directory. + +The external command metadata consists of the following fields: + +* ``metadata_version`` +* ``generator`` +* ``name`` +* ``version`` +* ``exports`` + +When serialised to a file, the name used for this metadata set SHOULD +be ``pydist-exports.json``. + + +External command metadata +------------------------- + +Distributions may define commands that will be available from the command +line following installation. As it can be beneficial to know whether or not +a distribution has external commands without needing to parse any metadata, +a suitable subset is defined for serialisation to a separate file in the +``dist-info`` metadata directory. + +The external command metadata consists of the following fields: + +* ``metadata_version`` +* ``generator`` +* ``name`` +* ``version`` +* ``commands`` + +When serialised to a file, the name used for this metadata set SHOULD +be ``pydist-commands.json``. + + Included documents ------------------ @@ -1482,6 +1564,33 @@ files later. +Exported interfaces +=================== + +Most Python distributions expose packages and modules for import through +the Python module namespace. + +Extensions to the metadata may be present in a mapping under the +'extensions' key. The keys must meet the same restrictions as +distribution names, while the values may be any type natively supported +in JSON:: + + "extensions" : { + "chili" : { "type" : "Poblano", "heat" : "Mild" }, + "languages" : [ "French", "Italian", "Hebrew" ] + } + +To avoid name conflicts, it is RECOMMENDED that distribution names be used +to identify metadata extensions. This practice will also make it easier to +find authoritative documentation for metadata extensions. 
+ +Metadata extensions allow development tools to record information in the +metadata that may be useful during later phases of distribution. For +example, a build tool could include default build options in a metadata +extension when creating an sdist, and use those when creating the wheel +files later. + + Extras (optional dependencies) ============================== diff --git a/pep-0432.txt b/pep-0432.txt --- a/pep-0432.txt +++ b/pep-0432.txt @@ -3,11 +3,11 @@ Version: $Revision$ Last-Modified: $Date$ Author: Nick Coghlan -Status: Draft +Status: Deferred Type: Standards Track Content-Type: text/x-rst Created: 28-Dec-2012 -Python-Version: 3.4 +Python-Version: 3.5 Post-History: 28-Dec-2012, 2-Jan-2013 @@ -25,6 +25,31 @@ implementation is developed. +PEP Deferral +============ + +Python 3.4 is nearing its first alpha, and already includes a couple of +significant low level changes in PEP 445 (memory allocator customisation) +and PEP 442 (safe object finalization). As a result of the latter PEP, +the shutdown procedure of CPython has also been changed to be more heavily +reliant on the cyclic garbage collector, significantly reducing the +number of modules that will experience the "module globals set to None" +behaviour that is used to deliberate break cycles and attempt to releases +more external resources cleanly. + +Furthermore, I am heavily involved in the current round of updates to the +Python packaging ecosystem (as both the lead author of PEP 426 and +BDFL-delegate for several other PEPs), leaving little to spare to work on +this proposal. The other developers I would trust to lead this effort are +also working on other things. + +So, due to those practical resource constraints, the proximity of Python +3.4 deadlines, and recognition that making too many significant changes to +the low level CPython infrastructure in one release is likely to be unwise, +further work on this PEP has been deferred to the Python 3.5 development +cycle. + + Proposal ======== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 15:09:41 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 15:09:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Backed_out_changeset_54c62ebb?= =?utf-8?q?0501?= Message-ID: <3c67xx5vZxzN9g@mail.python.org> http://hg.python.org/peps/rev/18dcfa66719a changeset: 5023:18dcfa66719a user: Nick Coghlan date: Fri Aug 02 23:08:54 2013 +1000 summary: Backed out changeset 54c62ebb0501 files: pep-0426.txt | 131 +++----------------------------------- pep-0432.txt | 29 +-------- 2 files changed, 13 insertions(+), 147 deletions(-) diff --git a/pep-0426.txt b/pep-0426.txt --- a/pep-0426.txt +++ b/pep-0426.txt @@ -86,24 +86,6 @@ "rationale" section at the end of the document, as it would otherwise be an irrelevant distraction for future readers. - -A Note on Time Frames -===================== - -There's a lot of work going on in the Python packaging space at the moment. -In the near term (up until the release of Python 3.4), those efforts will be -focused on the existing metadata standards, both those defined in Python -Enhancement Proposals, and the de facto standards defined by the setuptools -project. - -This PEP is about setting out a longer term goal for the ecosystem that -captures those existing capabilities in a format that is easier to work -with. 
There are still a number of key open questions (mostly related to -source based distribution), and those won't be able to receive proper -attention from the development community until the other near term -concerns have been resolved. - - Purpose ======= @@ -241,16 +223,12 @@ along with the supporting metadata file formats defined by the ``setuptools`` project. -"Distro" is used as the preferred term for Linux distributions, to help -avoid confusion with the Python-specific meaning of the term "distribution". - -"Dist" is the preferred abbreviation for "distributions" in the sense defined -in this PEP. - -"Qualified name" comes from PEP 3155, and refers to the name of an -object relative to its containing module. This is useful for referring -to method definitions on classes, as well as any other attributes of -top level module objects. +"Entry points" are a scheme for identifying Python callables or other +objects as strings consisting of a Python module name and a module +attribute name, separated by a colon. For example: ``"test.regrtest:main"``. + +"Distros" is used as the preferred term for Linux distributions, to help +avoid confusion with the Python-specific meaning of the term. Integration and deployment of distributions @@ -277,10 +255,7 @@ These three steps may all occur directly on the target system. Alternatively the build step may be separated out by using binary archives provided by the publisher of the distribution, or by creating the binary archives on a -separate system prior to deployment. The advantage of the latter approach -is that it minimizes the dependencies that need to be installed on -deployment targets (as the build dependencies will be needed only on the -build systems). +separate system prior to deployment. The published metadata for distributions SHOULD allow integrators, with the aid of build and integration tools, to: @@ -324,25 +299,6 @@ Standard build system --------------------- -.. note:: - - The standard build system currently described in the PEP is a draft based - on existing practices for projects using distutils or setuptools as their - build system (or other projects, like ``d2to1``, that expose a setup.py - file for backwards compatibility with existing tools) - - The specification doesn't currently cover expected argument support for - the commands, which is a limitation that needs to be addressed before the - PEP can be considered ready for acceptance. - - It is also possible that the "meta build system" will be separated out - into a distinct PEP in the coming months (similar to the separation of - the versioning and requirement specification standard out to PEP 440). - - If a `suitable API can be worked out `__, then it may - even be possible to switch to a more declarative API for build system - specification. - Both development and integration of distributions relies on the ability to build extension modules and perform other operations in a distribution independent manner. @@ -362,6 +318,10 @@ * ``python setup.py bdist_wheel``: create a binary archive from an sdist, source archive or VCS checkout +Future iterations of the metadata and associated PEPs may aim to replace +these ``distutils``/``setuptools`` dependent commands with build system +independent entry points. + Metadata format =============== @@ -476,48 +436,6 @@ be ``pydist-dependencies.json``. -Export metadata ---------------- - -Distributions may define components that are intended for use by other -distributions (such as plugins). 
As it can be beneficial to know whether or -not a distribution defines any such exports without needing to parse any -metadata, a suitable subset is defined for serialisation to a separate file -in the ``dist-info`` metadata directory. - -The external command metadata consists of the following fields: - -* ``metadata_version`` -* ``generator`` -* ``name`` -* ``version`` -* ``exports`` - -When serialised to a file, the name used for this metadata set SHOULD -be ``pydist-exports.json``. - - -External command metadata -------------------------- - -Distributions may define commands that will be available from the command -line following installation. As it can be beneficial to know whether or not -a distribution has external commands without needing to parse any metadata, -a suitable subset is defined for serialisation to a separate file in the -``dist-info`` metadata directory. - -The external command metadata consists of the following fields: - -* ``metadata_version`` -* ``generator`` -* ``name`` -* ``version`` -* ``commands`` - -When serialised to a file, the name used for this metadata set SHOULD -be ``pydist-commands.json``. - - Included documents ------------------ @@ -1564,33 +1482,6 @@ files later. -Exported interfaces -=================== - -Most Python distributions expose packages and modules for import through -the Python module namespace. - -Extensions to the metadata may be present in a mapping under the -'extensions' key. The keys must meet the same restrictions as -distribution names, while the values may be any type natively supported -in JSON:: - - "extensions" : { - "chili" : { "type" : "Poblano", "heat" : "Mild" }, - "languages" : [ "French", "Italian", "Hebrew" ] - } - -To avoid name conflicts, it is RECOMMENDED that distribution names be used -to identify metadata extensions. This practice will also make it easier to -find authoritative documentation for metadata extensions. - -Metadata extensions allow development tools to record information in the -metadata that may be useful during later phases of distribution. For -example, a build tool could include default build options in a metadata -extension when creating an sdist, and use those when creating the wheel -files later. - - Extras (optional dependencies) ============================== diff --git a/pep-0432.txt b/pep-0432.txt --- a/pep-0432.txt +++ b/pep-0432.txt @@ -3,11 +3,11 @@ Version: $Revision$ Last-Modified: $Date$ Author: Nick Coghlan -Status: Deferred +Status: Draft Type: Standards Track Content-Type: text/x-rst Created: 28-Dec-2012 -Python-Version: 3.5 +Python-Version: 3.4 Post-History: 28-Dec-2012, 2-Jan-2013 @@ -25,31 +25,6 @@ implementation is developed. -PEP Deferral -============ - -Python 3.4 is nearing its first alpha, and already includes a couple of -significant low level changes in PEP 445 (memory allocator customisation) -and PEP 442 (safe object finalization). As a result of the latter PEP, -the shutdown procedure of CPython has also been changed to be more heavily -reliant on the cyclic garbage collector, significantly reducing the -number of modules that will experience the "module globals set to None" -behaviour that is used to deliberate break cycles and attempt to releases -more external resources cleanly. - -Furthermore, I am heavily involved in the current round of updates to the -Python packaging ecosystem (as both the lead author of PEP 426 and -BDFL-delegate for several other PEPs), leaving little to spare to work on -this proposal. 
The other developers I would trust to lead this effort are -also working on other things. - -So, due to those practical resource constraints, the proximity of Python -3.4 deadlines, and recognition that making too many significant changes to -the low level CPython infrastructure in one release is likely to be unwise, -further work on this PEP has been deferred to the Python 3.5 development -cycle. - - Proposal ======== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 15:10:25 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 15:10:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Defer_PEP_432_=3A=28?= Message-ID: <3c67yn535nz7LmL@mail.python.org> http://hg.python.org/peps/rev/3e287c64da51 changeset: 5024:3e287c64da51 user: Nick Coghlan date: Fri Aug 02 23:10:02 2013 +1000 summary: Defer PEP 432 :( files: pep-0432.txt | 29 +++++++++++++++++++++++++++-- 1 files changed, 27 insertions(+), 2 deletions(-) diff --git a/pep-0432.txt b/pep-0432.txt --- a/pep-0432.txt +++ b/pep-0432.txt @@ -3,11 +3,11 @@ Version: $Revision$ Last-Modified: $Date$ Author: Nick Coghlan -Status: Draft +Status: Deferred Type: Standards Track Content-Type: text/x-rst Created: 28-Dec-2012 -Python-Version: 3.4 +Python-Version: 3.5 Post-History: 28-Dec-2012, 2-Jan-2013 @@ -25,6 +25,31 @@ implementation is developed. +PEP Deferral +============ + +Python 3.4 is nearing its first alpha, and already includes a couple of +significant low level changes in PEP 445 (memory allocator customisation) +and PEP 442 (safe object finalization). As a result of the latter PEP, +the shutdown procedure of CPython has also been changed to be more heavily +reliant on the cyclic garbage collector, significantly reducing the +number of modules that will experience the "module globals set to None" +behaviour that is used to deliberate break cycles and attempt to releases +more external resources cleanly. + +Furthermore, I am heavily involved in the current round of updates to the +Python packaging ecosystem (as both the lead author of PEP 426 and +BDFL-delegate for several other PEPs), leaving little to spare to work on +this proposal. The other developers I would trust to lead this effort are +also working on other things. + +So, due to those practical resource constraints, the proximity of Python +3.4 deadlines, and recognition that making too many significant changes to +the low level CPython infrastructure in one release is likely to be unwise, +further work on this PEP has been deferred to the Python 3.5 development +cycle. + + Proposal ======== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 17:13:31 2013 From: python-checkins at python.org (guido.van.rossum) Date: Fri, 2 Aug 2013 17:13:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Reduce_desirability_of_using_?= =?utf-8?q?100_chars_per_line=2E?= Message-ID: <3c6Bhq0W7Wz7Llj@mail.python.org> http://hg.python.org/peps/rev/bd8c95ad2054 changeset: 5025:bd8c95ad2054 parent: 5014:185a6cf22a00 user: Guido van Rossum date: Thu Aug 01 18:12:07 2013 -0700 summary: Reduce desirability of using 100 chars per line. 
files: pep-0008.txt | 19 ++++++++++++++----- 1 files changed, 14 insertions(+), 5 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -159,12 +159,11 @@ Maximum Line Length ------------------- -Aim to limit all lines to a maximum of 79 characters, but up to 99 -characters is acceptable when it improves readability. +Limit all lines to a maximum of 79 characters. For flowing long blocks of text with fewer structural restrictions -(docstrings or comments), limiting the line length to 72 characters -is recommended. +(docstrings or comments), the line length should be limited to 72 +characters. Limiting the required editor window width makes it possible to have several files open side-by-side, and works well when using code @@ -172,10 +171,20 @@ The default wrapping in most tools disrupts the visual structure of the code, making it more difficult to understand. The limits are chosen to -avoid wrapping in editors with the window width set to 80 (or 100), even +avoid wrapping in editors with the window width set to 80, even if the tool places a marker glyph in the final column when wrapping lines. Some web based tools may not offer dynamic line wrapping at all. +Some teams strongly prefer a longer line length. For code maintained +exclusively or primarily by a team that can reach agreement on this +issue, it is okay to increase the line nominal line length from 80 to +100 characters (effectively increasing the maximum length to 99 +characters), provided that comments and docstrings are still wrapped +at 72 characters. + +The Python standard library is conservative and requires limiting +lines to 79 characters (and docstrings/comments to 72). + The preferred way of wrapping long lines is by using Python's implied line continuation inside parentheses, brackets and braces. Long lines can be broken over multiple lines by wrapping expressions in -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 17:13:32 2013 From: python-checkins at python.org (guido.van.rossum) Date: Fri, 2 Aug 2013 17:13:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps_=28merge_default_-=3E_default=29?= =?utf-8?q?=3A_Merge?= Message-ID: <3c6Bhr3Fxfz7Ln6@mail.python.org> http://hg.python.org/peps/rev/70b79ccd671a changeset: 5026:70b79ccd671a parent: 5025:bd8c95ad2054 parent: 5024:3e287c64da51 user: Guido van Rossum date: Fri Aug 02 08:13:25 2013 -0700 summary: Merge files: pep-0008.txt | 30 +++++++++++++++++------------- pep-0432.txt | 29 +++++++++++++++++++++++++++-- 2 files changed, 44 insertions(+), 15 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -28,6 +28,9 @@ identified and past conventions are rendered obsolete by changes in the language itself. +Many projects have their own coding style guidelines. In the event of any +conflicts, such project-specific guides take precedence for that project. + A Foolish Consistency is the Hobgoblin of Little Minds ====================================================== @@ -177,7 +180,7 @@ Some teams strongly prefer a longer line length. For code maintained exclusively or primarily by a team that can reach agreement on this -issue, it is okay to increase the line nominal line length from 80 to +issue, it is okay to increase the nominal line length from 80 to 100 characters (effectively increasing the maximum length to 99 characters), provided that comments and docstrings are still wrapped at 72 characters. 
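As a generic illustration of the wrapping style recommended in both versions of this text (the helper and argument names below are invented for the example, not taken from PEP 8), implicit continuation inside parentheses keeps every physical line within the 79 character limit without backslashes::

    def format_heading(title, author, width=79, fill_char="-"):
        # Hypothetical helper; the wrapped call below is the point of
        # the example.
        return "{0}\n{1}\n{2}".format(title.center(width),
                                      author.rjust(width),
                                      fill_char * width)

    heading = format_heading("A fairly long report title",
                             "Prepared by the documentation team",
                             width=79, fill_char="=")
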
@@ -210,8 +213,8 @@ def __init__(self, width, height, color='black', emphasis=None, highlight=0): if (width == 0 and height == 0 and - color == 'red' and emphasis == 'strong' or - highlight > 100): + color == 'red' and emphasis == 'strong' or + highlight > 100): raise ValueError("sorry, you lose") if width == 0 and height == 0 and (color == 'red' or emphasis is None): @@ -241,18 +244,19 @@ Note, some editors and web-based code viewers may not recognize control-L as a form feed and will show another glyph in its place. -Encodings (PEP 263) -------------------- + +Source File Encoding +-------------------- Code in the core Python distribution should always use UTF-8 (or ASCII in Python 2). -Files using ASCII (in Python 2) or UTF-8 (in Python 3) should not have a -coding cookie. +Files using ASCII (in Python 2) or UTF-8 (in Python 3) should not have +an encoding declaration. In the standard library, non-default encodings should be used only for test purposes or when a comment or docstring needs to mention an author -name that that contains non-ASCII characters; otherwise, using ``\x``, +name that contains non-ASCII characters; otherwise, using ``\x``, ``\u``, ``\U``, or ``\N`` escapes is the preferred way to include non-ASCII data in string literals. @@ -309,7 +313,7 @@ However, explicit relative imports are an acceptable alternative to absolute imports, especially when dealing with complex package layouts - where using absolute imports would be unecessarily verbose:: + where using absolute imports would be unnecessarily verbose:: from . import sibling from .sibling import example @@ -403,7 +407,7 @@ - If operators with different priorities are used, consider adding whitespace around the operators with the lowest priority(ies). Use - your own judgement; however, never use more than one space, and + your own judgment; however, never use more than one space, and always have the same amount of whitespace on both sides of a binary operator. @@ -891,8 +895,8 @@ operator. However, it is best to implement all six operations so that confusion doesn't arise in other contexts. -- Always use a def statement instead of assigning a lambda expression - to a name. +- Always use a def statement instead of an assignment statement that binds + a lambda expression directly to a name. Yes:: @@ -923,7 +927,7 @@ Class naming conventions apply here, although you should add the suffix "Error" to your exception classes if the exception is an error. Non-error exceptions that are used for non-local flow control - or other forms of signalling need no special suffix. + or other forms of signaling need no special suffix. - Use exception chaining appropriately. In Python 3, "raise X from Y" should be used to indicate explicit replacement without losing the diff --git a/pep-0432.txt b/pep-0432.txt --- a/pep-0432.txt +++ b/pep-0432.txt @@ -3,11 +3,11 @@ Version: $Revision$ Last-Modified: $Date$ Author: Nick Coghlan -Status: Draft +Status: Deferred Type: Standards Track Content-Type: text/x-rst Created: 28-Dec-2012 -Python-Version: 3.4 +Python-Version: 3.5 Post-History: 28-Dec-2012, 2-Jan-2013 @@ -25,6 +25,31 @@ implementation is developed. +PEP Deferral +============ + +Python 3.4 is nearing its first alpha, and already includes a couple of +significant low level changes in PEP 445 (memory allocator customisation) +and PEP 442 (safe object finalization). 
As a result of the latter PEP, +the shutdown procedure of CPython has also been changed to be more heavily +reliant on the cyclic garbage collector, significantly reducing the +number of modules that will experience the "module globals set to None" +behaviour that is used to deliberate break cycles and attempt to releases +more external resources cleanly. + +Furthermore, I am heavily involved in the current round of updates to the +Python packaging ecosystem (as both the lead author of PEP 426 and +BDFL-delegate for several other PEPs), leaving little to spare to work on +this proposal. The other developers I would trust to lead this effort are +also working on other things. + +So, due to those practical resource constraints, the proximity of Python +3.4 deadlines, and recognition that making too many significant changes to +the low level CPython infrastructure in one release is likely to be unwise, +further work on this PEP has been deferred to the Python 3.5 development +cycle. + + Proposal ======== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 17:15:21 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 17:15:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_First_draft_of_entry_points_f?= =?utf-8?q?or_metadata_2=2E0?= Message-ID: <3c6Bkx3btfz7Ln8@mail.python.org> http://hg.python.org/peps/rev/ea3d93e40e02 changeset: 5027:ea3d93e40e02 user: Nick Coghlan date: Sat Aug 03 01:13:56 2013 +1000 summary: First draft of entry points for metadata 2.0 files: pep-0426.txt | 241 ++++++++++++++++++++++- pep-0426/pydist-schema.json | 82 +++++++- 2 files changed, 306 insertions(+), 17 deletions(-) diff --git a/pep-0426.txt b/pep-0426.txt --- a/pep-0426.txt +++ b/pep-0426.txt @@ -86,6 +86,24 @@ "rationale" section at the end of the document, as it would otherwise be an irrelevant distraction for future readers. + +A Note on Time Frames +===================== + +There's a lot of work going on in the Python packaging space at the moment. +In the near term (up until the release of Python 3.4), those efforts will be +focused on the existing metadata standards, both those defined in Python +Enhancement Proposals, and the de facto standards defined by the setuptools +project. + +This PEP is about setting out a longer term goal for the ecosystem that +captures those existing capabilities in a format that is easier to work +with. There are still a number of key open questions (mostly related to +source based distribution), and those won't be able to receive proper +attention from the development community until the other near term +concerns have been resolved. + + Purpose ======= @@ -223,12 +241,16 @@ along with the supporting metadata file formats defined by the ``setuptools`` project. -"Entry points" are a scheme for identifying Python callables or other -objects as strings consisting of a Python module name and a module -attribute name, separated by a colon. For example: ``"test.regrtest:main"``. - -"Distros" is used as the preferred term for Linux distributions, to help -avoid confusion with the Python-specific meaning of the term. +"Distro" is used as the preferred term for Linux distributions, to help +avoid confusion with the Python-specific meaning of the term "distribution". + +"Dist" is the preferred abbreviation for "distributions" in the sense defined +in this PEP. + +"Qualified name" comes from PEP 3155, and refers to the dotted name of an +object relative to its containing module. 
This is useful for referring +to method definitions on classes, as well as any other attributes of +top level module objects. Integration and deployment of distributions @@ -255,7 +277,10 @@ These three steps may all occur directly on the target system. Alternatively the build step may be separated out by using binary archives provided by the publisher of the distribution, or by creating the binary archives on a -separate system prior to deployment. +separate system prior to deployment. The advantage of the latter approach +is that it minimizes the dependencies that need to be installed on +deployment targets (as the build dependencies will be needed only on the +build systems). The published metadata for distributions SHOULD allow integrators, with the aid of build and integration tools, to: @@ -299,6 +324,25 @@ Standard build system --------------------- +.. note:: + + The standard build system currently described in the PEP is a draft based + on existing practices for projects using distutils or setuptools as their + build system (or other projects, like ``d2to1``, that expose a setup.py + file for backwards compatibility with existing tools) + + The specification doesn't currently cover expected argument support for + the commands, which is a limitation that needs to be addressed before the + PEP can be considered ready for acceptance. + + It is also possible that the "meta build system" will be separated out + into a distinct PEP in the coming months (similar to the separation of + the versioning and requirement specification standard out to PEP 440). + + If a `suitable API can be worked out `__, then it may + even be possible to switch to a more declarative API for build system + specification. + Both development and integration of distributions relies on the ability to build extension modules and perform other operations in a distribution independent manner. @@ -318,10 +362,6 @@ * ``python setup.py bdist_wheel``: create a binary archive from an sdist, source archive or VCS checkout -Future iterations of the metadata and associated PEPs may aim to replace -these ``distutils``/``setuptools`` dependent commands with build system -independent entry points. - Metadata format =============== @@ -436,6 +476,48 @@ be ``pydist-dependencies.json``. +Export metadata +--------------- + +Distributions may define components that are intended for use by other +distributions (such as plugins). As it can be beneficial to know whether or +not a distribution defines any such exports without needing to parse any +metadata, a suitable subset is defined for serialisation to a separate file +in the ``dist-info`` metadata directory. + +The external command metadata consists of the following fields: + +* ``metadata_version`` +* ``generator`` +* ``name`` +* ``version`` +* ``exports`` + +When serialised to a file, the name used for this metadata set SHOULD +be ``pydist-exports.json``. + + +Command metadata +---------------- + +Distributions may define commands that will be available from the command +line following installation. As it can be beneficial to know whether or not +a distribution has such commands without needing to parse any metadata, +a suitable subset is defined for serialisation to a separate file in the +``dist-info`` metadata directory. + +The external command metadata consists of the following fields: + +* ``metadata_version`` +* ``generator`` +* ``name`` +* ``version`` +* ``commands`` + +When serialised to a file, the name used for this metadata set SHOULD +be ``pydist-commands.json``. 
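Combining the field list above with the ``commands`` example that appears further down in this patch, a serialised ``pydist-commands.json`` might look roughly like the following sketch (the distribution name, version and generator string are illustrative values, not taken from a real project)::

    import json

    command_metadata = {
        "metadata_version": "2.0",
        "generator": "setuptools (0.9)",
        "name": "chair",
        "version": "0.1",
        "commands": {
            "wrap_console": [{"wrapwithpython": "chair.run_cli"}],
            "wrap_gui": [{"wrapwithpythonw": "chair:run_gui"}],
            "prebuilt": ["notawrapper"],
        },
    }
    print(json.dumps(command_metadata, indent=2, sort_keys=True))
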
+ + Included documents ------------------ @@ -508,7 +590,7 @@ Example:: - "generator": "setuptools (0.8)" + "generator": "setuptools (0.9)" Name @@ -1348,6 +1430,141 @@ "supports_environments": ["python_version >= '2.6' and sys_platform != 'win32'", "python_version >= '3.3' and sys_platform == 'win32'"] +Installed interfaces +==================== + +Most Python distributions expose packages and modules for import through +the Python module namespace. Distributions may also expose other +interfaces when installed. + +Export specifiers +----------------- + +An export specifier is a string using one of the following formats:: + + module + module:name + module[requires_extra] + module:name[requires_extra] + +The meaning of the subfields is as follows: + +* ``module``: the module providing the export +* ``name``: if applicable, the qualified name of the export within the module +* ``requires_extra``: indicates the export will only work correctly if the + additional dependencies named in the given extra are available. + +Note that installation of extras is not tracked directly: they are merely +a convenient way to refer to a set of dependencies that will be checked for +at runtime. + +.. note:: + + I tried this as a mapping with subfields, and it made the examples below + unreadable. While this PEP is mostly for tool use, readability still + matters to some degree for debugging purposes, and because I expect + snippets of the format to be reused elsewhere. + + +Modules +------- + +A list of module names that the distribution provides for import. + +For names that contain dots, the portion of the name before the final dot +MUST appear either in the installed module list or in the namespace package +list. + +Note that attempting to import some declared modules may result in an +exception if the appropriate extras are not installed. + +Example:: + + "modules": ["chair", "chair.cushions", "python_sketches.nobody_expects"] + +.. note:: + + Making this a list of export specifiers instead would allow a distribution + to declare when a particular module requires a particular extra in order + to run correctly. On the other hand, there's an argument to be made that + that is the point where it starts to become worthwhile to split out a + separate distribution rather than using extras. + + +Namespaces +---------- + +A list of namespace packages that the distribution contributes modules to. + +On versions of Python prior to Python 3.3 (which provides native namespace +package support), installation tools SHOULD emit a suitable ``__init__.py`` +file to properly initialise the namespace rather than using a distribution +provided file. + +Installation tools SHOULD emit a warning and MAY emit an error if a +distribution declares a namespace package that conflicts the name of an +already installed module or vice-versa. + +Example:: + + "namespaces": ["python_sketches"] + + +Commands +-------- + +The ``commands`` mapping contains three subfields: + +* ``wrap_console``: console wrapper scripts to be generated by the installer +* ``wrap_gui``: GUI wrapper scripts to be generated by the installer +* ``prebuilt``: scripts created by the distribution's build process and + installed directly to the configured scripts directory + +``wrap_console`` and ``wrap_gui`` are both mappings of relatively arbitrary +script names to export specifiers. The script names must follow the rules +for distribution names. 
The export specifiers must refer to +either a package with a __main__ submodule (if no ``name`` subfield is +given in the export specifier) or else to a callable inside the named +module. + +Installation tools should generate appropriate wrappers as part of the +installation process. + +.. note:: + + Still needs more detail on what "appropriate wrapper" means. + +``prebuilt`` is a list of script paths, relative to the scripts directory in +a wheel file or following installation. They are provided for informational +purpose only - installing them is handled through the normal processes for +files created when building a distribution. + + +Example:: + + "commands": { + "wrap_console": [{"wrapwithpython": "chair.run_cli"}], + "wrap_gui": [{"wrapwithpythonw": "chair:run_gui"}], + "prebuilt": ["notawrapper"] + } + + + +Exports +------- + +The ``exports`` mapping contains relatively arbitrary subfields, each +defining an export group. Each export group is then a mapping of relatively +arbitrary subfields to export specifiers. + +Both export group names and export names must follow the rules for +distribution identifiers. It is suggested that export groups be named +after distributions to help avoid name conflicts. + +The meaning of exports within an export group is up to those defining the +export group. One common use case is to advertise plugins for use by other +software. + Install hooks ============= diff --git a/pep-0426/pydist-schema.json b/pep-0426/pydist-schema.json --- a/pep-0426/pydist-schema.json +++ b/pep-0426/pydist-schema.json @@ -136,6 +136,32 @@ "$ref": "#/definitions/provides_declaration" } }, + "modules": { + "description": "A list of modules and/or packages available for import after installing this distribution.", + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/dotted_name" + } + }, + "namespaces": { + "description": "A list of namespace packages this distribution contributes to", + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/dotted_name" + } + }, + "commands": { + "description": "Command line interfaces provided by this distribution", + "type": "object", + "$ref": "#/definitions/commands" + }, + "exports": { + "description": "Other exported interfaces provided by this distribution", + "type": "object", + "$ref": "#/definitions/exports" + }, "obsoleted_by": { "description": "A string that indicates that this project is no longer being developed. 
The named project provides a substitute or replacement.", "type": "string", @@ -155,11 +181,11 @@ "properties": { "postinstall": { "type": "string", - "$ref": "#/definitions/entry_point" + "$ref": "#/definitions/export_specifier" }, "preuninstall": { "type": "string", - "$ref": "#/definitions/entry_point" + "$ref": "#/definitions/export_specifier" } } }, @@ -221,6 +247,45 @@ "required": ["requires"], "additionalProperties": false }, + "commands": { + "type": "object", + "properties": { + "wrap_console": { + "type": "object", + "$ref": "#/definitions/export_map" + }, + "wrap_gui": { + "type": "object", + "$ref": "#/definitions/export_map" + }, + "prebuilt": { + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/relative_path" + } + } + }, + "additionalProperties": false + }, + "exports": { + "type": "object", + "patternProperties": { + "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": { + "type": "object", + "$ref": "#/definitions/export_map" + } + }, + "additionalProperties": false + }, + "export_map": { + "type": "object", + "patternProperties": { + "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": { + "type": "string", + "$ref": "#/definitions/export_specifier" + } + }, "valid_name": { "type": "string", "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" @@ -234,14 +299,21 @@ "environment_marker": { "type": "string" }, - "entry_point": { - "type": "string" - }, "document_name": { "type": "string" }, "extra_name" : { "type": "string" + }, + "relative_path" : { + "type": "string" + }, + "export_specifier": { + "type": "string", + }, + "dotted_name" : { + "type": "string", + "pattern": "^[A-Za-z]([0-9A-Za-z_])*([.][A-Za-z]([0-9A-Za-z_])*)*$" } } } -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 18:43:13 2013 From: python-checkins at python.org (nick.coghlan) Date: Fri, 2 Aug 2013 18:43:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Some_new_ideas_for_PEP_3150?= Message-ID: <3c6DhK73J3z7LpG@mail.python.org> http://hg.python.org/peps/rev/061a39bd358a changeset: 5028:061a39bd358a user: Nick Coghlan date: Sat Aug 03 02:42:54 2013 +1000 summary: Some new ideas for PEP 3150 files: pep-3150.txt | 120 ++++++++++++++++++++++++++++---------- 1 files changed, 87 insertions(+), 33 deletions(-) diff --git a/pep-3150.txt b/pep-3150.txt --- a/pep-3150.txt +++ b/pep-3150.txt @@ -19,9 +19,11 @@ Python statements that do not currently have an associated code suite. This clause will create a statement local namespace for additional names that are accessible in the associated statement, but do not become part of the -containing namespace. To permit a sane implementation strategy, forward -references to names from the ``given`` clause will need to be marked -explicitly. +containing namespace. + +Adoption of a new symbol, ``?``, is proposed to denote a forward reference +to the namespace created by running the associated code suite. It will be +a reference to a ``types.SimpleNamespace`` object. The primary motivation is to enable a more declarative style of programming, where the operation to be performed is presented to the reader first, and the @@ -72,12 +74,16 @@ name in the header line, with the actual definitions following in the indented clause. 
As a simple example:: - sorted_data = sorted(data, key=.sort_key) given: + sorted_data = sorted(data, key=?.sort_key) given: def sort_key(item): return item.attr1, item.attr2 -The leading ``.`` on ``.sort_key`` indicates to the compiler that this -is a forward reference to a name defined in the ``given`` clause. +The new symbol ``?`` is used to refer to the given namespace. It would be a +``types.SimpleNamespace`` instance, so ``?.sort_key`` functions as +a forward reference to a name defined in the ``given`` clause. + +A docstring would be permitted in the given clause, and would be attached +to the result namespace as its ``__doc__`` attribute. The ``pass`` statement is included to provide a consistent way to skip inclusion of a meaningful expression in the header line. While this is not @@ -94,7 +100,7 @@ # Explicit early binding via given clause seq = [] for i in range(10): - seq.append(.f) given i=i: + seq.append(.f) given i=i in: def f(): return i assert [f() for f in seq] == list(range(10)) @@ -105,7 +111,7 @@ The following statement:: - op(.f, .g) given bound_a=a, bound_b=b: + op(?.f, ?.g) given bound_a=a, bound_b=b in: def f(): return bound_a + bound_b def g(): @@ -121,9 +127,10 @@ return bound_a + bound_b def g(): return bound_a - bound_b - return f, g - __ref1, __ref2 = __scope(__arg1) - op(__ref1, __ref2) + return types.SimpleNamespace(**locals()) + __ref = __scope(__arg1, __arg2) + __ref.__doc__ = __scope.__doc__ + op(__ref.f, __ref.g) A ``given`` clause is essentially a nested function which is created and then immediately executed. Unless explicitly passed in, names are looked @@ -158,7 +165,7 @@ yield_stmt: yield_expr [given_clause] raise_stmt: 'raise' [test ['from' test]] [given_clause] assert_stmt: 'assert' test [',' test] [given_clause] - given_clause: "given" (NAME '=' test)* ":" suite + given_clause: "given" [(NAME '=' test)+ "in"]":" suite (Note that ``expr_stmt`` in the grammar is a slight misnomer, as it covers assignment and augmented assignment in addition to simple expression @@ -207,7 +214,7 @@ flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt In addition to the above changes, the definition of ``atom`` would be changed -to also allow ``"." NAME``. The restriction of this usage to statements with +to also allow ``?``. The restriction of this usage to statements with an associated ``given`` clause would be handled by a later stage of the compilation process (likely AST construction, which already enforces other restrictions where the grammar is overly permissive in order to @@ -277,13 +284,14 @@ However, while they are the initial motivating use case, limiting this feature solely to simple assignments would be overly restrictive. Once the feature is defined at all, it would be quite arbitrary to prevent its use -for augmented assignments, return statements, yield expressions and -arbitrary expressions that may modify the application state. +for augmented assignments, return statements, yield expressions, +comprehensions and arbitrary expressions that may modify the +application state. The ``given`` clause may also function as a more readable alternative to some uses of lambda expressions and similar constructs when passing one-off functions to operations -like ``sorted()``. +like ``sorted()`` or in callback based event-driven programming. 
In module and class level code, the ``given`` clause will serve as a clear and reliable replacement for usage of the ``del`` statement to keep @@ -350,7 +358,7 @@ # would be equivalent to - seq2 = .result given seq=seq: + seq2 = ?.result given seq=seq: result = [] for y in seq: if p(y): @@ -367,7 +375,7 @@ provide a precisely equivalent expansion for a generator expression. The closest it can get is to define an additional level of scoping:: - seq2 = .g(seq) given: + seq2 = ?.g(seq) given: def g(seq): for y in seq: if p(y): @@ -375,6 +383,22 @@ if q(x): yield x +This limitation could be remedied by permitting the given clause to be +a generator function, in which case ? would refer to a generator-iterator +object rather than a simple namespace:: + + seq2 = ? given seq=seq in: + for y in seq: + if p(y): + for x in y: + if q(x): + yield x + +However, this would make the meaning of "?" quite ambiguous, even more so +than is already the case for the meaning of ``def`` statements (which will +usually have a docstring indicating whether or not a function definition is +actually a generator) + Explaining Decorator Clause Evaluation and Application ------------------------------------------------------ @@ -477,14 +501,19 @@ I believe the proposal in this PEP would finally let Python get close to the "executable pseudocode" bar for the kind of thought expressed above:: - sorted_list = sorted(original, key=.sort_key) given: - def sort_key(item): + sorted_list = sorted(original, key=?.key) given: + def key(item): return item.attr1, item.attr2 -Everything is in the same order as it was in the user's original thought, the -only addition they have to make is to give the sorting criteria a name so that -the usage can be linked up to the subsequent definition. - +Everything is in the same order as it was in the user's original thought, and +they don't even need to come up with a name for the sorting criteria: it is +possible to reuse the keyword argument name directly. + +A possible enhancement to those proposal would be to provide a convenient +shorthand syntax to say "use the given clause contents as keyword +arguments". Even without dedicated syntax, that can be written simply as +``**vars(?)``. + Harmful to Introspection ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -516,7 +545,7 @@ This is more of a deficiency in the PEP rather than the idea, though. If it wasn't a real world problem, we wouldn't get so many complaints about the lack of multi-line lambda support and Ruby's block construct -probaly wouldn't be quite so popular. +probably wouldn't be quite so popular. Open Questions @@ -525,9 +554,12 @@ Syntax for Forward References ----------------------------- -The leading ``.`` arguably fails the "syntax shall not look like grit on -Uncle Tim's monitor" test. However, it does have the advantages of being -easy to type and already having an association with namespaces. +The ``?`` symbol is proposed for forward references to the given namespace +as it is short, currently unused and suggests "there's something missing +here that will be filled in later". + +The proposal in the PEP doesn't neatly parallel any existing Python feature, +so reusing an already used symbol has been deliberately avoided. Handling of ``nonlocal`` and ``global`` @@ -541,8 +573,8 @@ functions were defined as in the expansion above. 
-Detailed Semantics #3: Handling of ``break`` and ``continue`` -------------------------------------------------------------- +Handling of ``break`` and ``continue`` +-------------------------------------- ``break`` and ``continue`` will operate as if the anonymous functions were defined as in the expansion above. They will be syntax errors if they occur @@ -561,6 +593,25 @@ Examples ======== +Defining callbacks for event driven programming:: + + # Current Python (definition before use) + def cb(sock): + # Do something with socket + def eb(exc): + logging.exception( + "Failed connecting to %s:%s", host, port) + loop.create_connection((host, port), cb, eb) given: + + # Becomes: + loop.create_connection((host, port), ?.cb, ?.eb) given: + def cb(sock): + # Do something with socket + def eb(exc): + logging.exception( + "Failed connecting to %s:%s", host, port) + + Defining "one-off" classes which typically only have a single instance:: # Current Python (instantiation after definition) @@ -579,7 +630,7 @@ ... # However many lines # Becomes: - public_name = .MeaningfulClassName(*params) given: + public_name = ?.MeaningfulClassName(*params) given: class MeaningfulClassName(): ... # Should trawl the stdlib for an example of doing this @@ -593,7 +644,7 @@ del _createenviron # Becomes: - environ = ._createenviron() given: + environ = ?._createenviron() given: def _createenviron(): ... # 27 line function @@ -606,7 +657,7 @@ return decorating_function # Becomes: - return .decorating_function given: + return ?.decorating_function given: # Cell variables rather than locals, but should give similar speedup tuple, sorted, len, KeyError = tuple, sorted, len, KeyError def decorating_function(user_function): @@ -701,6 +752,9 @@ .. [9] Possible PEP 3150 style guidelines (#2): http://mail.python.org/pipermail/python-ideas/2011-October/012341.html +.. [10] Multi-line lambdas (again!) + http://mail.python.org/pipermail/python-ideas/2013-August/022526.html + Copyright ========= -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 2 20:40:41 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 2 Aug 2013 20:40:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Backout_62658d9d8926_=28is?= =?utf-8?q?sue_=2310241=29=3A_it_causes_a_crash_at_shutdown_when?= Message-ID: <3c6HHs5rS9z7LjN@mail.python.org> http://hg.python.org/cpython/rev/314a872f54e1 changeset: 84979:314a872f54e1 user: Antoine Pitrou date: Fri Aug 02 20:39:46 2013 +0200 summary: Backout 62658d9d8926 (issue #10241): it causes a crash at shutdown when deallocating a Tkapp object. files: Include/pystate.h | 3 --- Misc/NEWS | 3 --- Python/import.c | 2 -- Python/pystate.c | 25 ------------------------- 4 files changed, 0 insertions(+), 33 deletions(-) diff --git a/Include/pystate.h b/Include/pystate.h --- a/Include/pystate.h +++ b/Include/pystate.h @@ -134,9 +134,6 @@ PyAPI_FUNC(int) PyState_RemoveModule(struct PyModuleDef*); #endif PyAPI_FUNC(PyObject*) PyState_FindModule(struct PyModuleDef*); -#ifndef Py_LIMITED_API -PyAPI_FUNC(void) _PyState_ClearModules(void); -#endif PyAPI_FUNC(PyThreadState *) PyThreadState_New(PyInterpreterState *); PyAPI_FUNC(PyThreadState *) _PyThreadState_Prealloc(PyInterpreterState *); diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -12,9 +12,6 @@ - Issue #17899: Fix rare file descriptor leak in os.listdir(). -- Issue #10241: Clear extension module dict copies at interpreter shutdown. - Patch by Neil Schemenauer, minimally modified. 
- - Issue #9035: ismount now recognises volumes mounted below a drive root on Windows. Original patch by Atsuo Ishimoto. diff --git a/Python/import.c b/Python/import.c --- a/Python/import.c +++ b/Python/import.c @@ -380,8 +380,6 @@ builtins = interp->builtins; interp->builtins = PyDict_New(); Py_DECREF(builtins); - /* Clear module dict copies stored in the interpreter state */ - _PyState_ClearModules(); /* Collect references */ _PyGC_CollectNoFail(); /* Dump GC stats before it's too late, since it uses the warnings diff --git a/Python/pystate.c b/Python/pystate.c --- a/Python/pystate.c +++ b/Python/pystate.c @@ -320,31 +320,6 @@ return PyList_SetItem(state->modules_by_index, index, Py_None); } -/* used by import.c:PyImport_Cleanup */ -void -_PyState_ClearModules(void) -{ - PyInterpreterState *state = PyThreadState_GET()->interp; - if (state->modules_by_index) { - Py_ssize_t i; - for (i = 0; i < PyList_GET_SIZE(state->modules_by_index); i++) { - PyObject *m = PyList_GET_ITEM(state->modules_by_index, i); - if (PyModule_Check(m)) { - /* cleanup the saved copy of module dicts */ - PyModuleDef *md = PyModule_GetDef(m); - if (md) - Py_CLEAR(md->m_base.m_copy); - } - } - /* Setting modules_by_index to NULL could be dangerous, so we - clear the list instead. */ - if (PyList_SetSlice(state->modules_by_index, - 0, PyList_GET_SIZE(state->modules_by_index), - NULL)) - PyErr_WriteUnraisable(state->modules_by_index); - } -} - void PyThreadState_Clear(PyThreadState *tstate) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 03:06:14 2013 From: python-checkins at python.org (ned.deily) Date: Sat, 3 Aug 2013 03:06:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3MDQ2?= =?utf-8?q?=3A_Fix_test=5Fsubprocess_test=5Fexecutable=5Fwithout=5Fcwd_bro?= =?utf-8?q?ken_test_case=2E?= Message-ID: <3c6Rrk5m70z7Ljg@mail.python.org> http://hg.python.org/cpython/rev/f37b336bfbca changeset: 84980:f37b336bfbca branch: 3.3 parent: 84976:78db41e4c6a9 user: Ned Deily date: Fri Aug 02 18:02:21 2013 -0700 summary: Issue #17046: Fix test_subprocess test_executable_without_cwd broken test case. files: Lib/test/test_subprocess.py | 3 ++- Misc/NEWS | 2 ++ 2 files changed, 4 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -382,7 +382,8 @@ def test_executable_without_cwd(self): # For a normal installation, it should work without 'cwd' # argument. For test runs in the build directory, see #7774. - self._assert_cwd('', "somethingyoudonthave", executable=sys.executable) + self._assert_cwd(os.getcwd(), "somethingyoudonthave", + executable=sys.executable) def test_stdin_pipe(self): # stdin redirection diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -230,6 +230,8 @@ Tests ----- +- Issue #17046: Fix broken test_executable_without_cwd in test_subprocess. + - Issue #15415: Add new temp_dir() and change_cwd() context managers to test.support, and refactor temp_cwd() to use them. Patch by Chris Jerdonek. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 03:06:16 2013 From: python-checkins at python.org (ned.deily) Date: Sat, 3 Aug 2013 03:06:16 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2317046=3A_merge_from_3=2E3?= Message-ID: <3c6Rrm0mdcz7Ljg@mail.python.org> http://hg.python.org/cpython/rev/e3e28ee5b5c3 changeset: 84981:e3e28ee5b5c3 parent: 84979:314a872f54e1 parent: 84980:f37b336bfbca user: Ned Deily date: Fri Aug 02 18:05:31 2013 -0700 summary: Issue #17046: merge from 3.3 files: Lib/test/test_subprocess.py | 3 ++- Misc/NEWS | 2 ++ 2 files changed, 4 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -416,7 +416,8 @@ def test_executable_without_cwd(self): # For a normal installation, it should work without 'cwd' # argument. For test runs in the build directory, see #7774. - self._assert_cwd('', "somethingyoudonthave", executable=sys.executable) + self._assert_cwd(os.getcwd(), "somethingyoudonthave", + executable=sys.executable) def test_stdin_pipe(self): # stdin redirection diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -599,6 +599,8 @@ Tests ----- +- Issue #17046: Fix broken test_executable_without_cwd in test_subprocess. + - Issue #15415: Add new temp_dir() and change_cwd() context managers to test.support, and refactor temp_cwd() to use them. Patch by Chris Jerdonek. -- Repository URL: http://hg.python.org/cpython From tjreedy at udel.edu Sat Aug 3 02:06:55 2013 From: tjreedy at udel.edu (Terry Reedy) Date: Fri, 02 Aug 2013 20:06:55 -0400 Subject: [Python-checkins] peps: Use Guido's preferred wording re: line length In-Reply-To: <3c64955v4yz7Llj@mail.python.org> References: <3c64955v4yz7Llj@mail.python.org> Message-ID: <51FC499F.3070703@udel.edu> On 8/2/2013 6:19 AM, nick.coghlan wrote: > +The Python standard library is conservative and requires limiting > +lines to 79 characters (and docstrings/comments to 72). If you (and Guido) mean that as a hard limit, then patchcheck should check line lengths as well as trailing whitespace. From ncoghlan at gmail.com Sat Aug 3 04:26:03 2013 From: ncoghlan at gmail.com (Nick Coghlan) Date: Sat, 3 Aug 2013 12:26:03 +1000 Subject: [Python-checkins] peps: Use Guido's preferred wording re: line length In-Reply-To: <51FC499F.3070703@udel.edu> References: <3c64955v4yz7Llj@mail.python.org> <51FC499F.3070703@udel.edu> Message-ID: On 3 Aug 2013 11:07, "Terry Reedy" wrote: > > On 8/2/2013 6:19 AM, nick.coghlan wrote: > >> +The Python standard library is conservative and requires limiting >> +lines to 79 characters (and docstrings/comments to 72). > > > If you (and Guido) mean that as a hard limit, then patchcheck should check line lengths as well as trailing whitespace. That raises issues when modifying existing non-compliant files, because it removes the human judgement on whether a non-compliance is worth fixing or not. Cheers, Nick. > _______________________________________________ > Python-checkins mailing list > Python-checkins at python.org > http://mail.python.org/mailman/listinfo/python-checkins -------------- next part -------------- An HTML attachment was scrubbed... 
URL: From tjreedy at udel.edu Sat Aug 3 04:45:16 2013 From: tjreedy at udel.edu (Terry Reedy) Date: Fri, 02 Aug 2013 22:45:16 -0400 Subject: [Python-checkins] peps: Use Guido's preferred wording re: line length In-Reply-To: References: <3c64955v4yz7Llj@mail.python.org> <51FC499F.3070703@udel.edu> Message-ID: <51FC6EBC.5090209@udel.edu> On 8/2/2013 10:26 PM, Nick Coghlan wrote: > > On 3 Aug 2013 11:07, "Terry Reedy" > wrote: > > > > On 8/2/2013 6:19 AM, nick.coghlan wrote: > > > >> +The Python standard library is conservative and requires limiting > >> +lines to 79 characters (and docstrings/comments to 72). > > > > > > If you (and Guido) mean that as a hard limit, then patchcheck should > check line lengths as well as trailing whitespace. > > That raises issues when modifying existing non-compliant files, because > it removes the human judgement on whether a non-compliance is worth > fixing or not. I meant tools/scripts/patchcheck.py, not the pre-commit hook. The check would inform (especially for old files) or remind (for new files) so that judgment could be applied. From ncoghlan at gmail.com Sat Aug 3 04:51:46 2013 From: ncoghlan at gmail.com (Nick Coghlan) Date: Sat, 3 Aug 2013 12:51:46 +1000 Subject: [Python-checkins] peps: Use Guido's preferred wording re: line length In-Reply-To: <51FC6EBC.5090209@udel.edu> References: <3c64955v4yz7Llj@mail.python.org> <51FC499F.3070703@udel.edu> <51FC6EBC.5090209@udel.edu> Message-ID: On 3 Aug 2013 12:45, "Terry Reedy" wrote: > > > > On 8/2/2013 10:26 PM, Nick Coghlan wrote: >> >> >> On 3 Aug 2013 11:07, "Terry Reedy" > > wrote: >> > >> > On 8/2/2013 6:19 AM, nick.coghlan wrote: >> > >> >> +The Python standard library is conservative and requires limiting >> >> +lines to 79 characters (and docstrings/comments to 72). >> > >> > >> > If you (and Guido) mean that as a hard limit, then patchcheck should >> check line lengths as well as trailing whitespace. >> >> That raises issues when modifying existing non-compliant files, because >> it removes the human judgement on whether a non-compliance is worth >> fixing or not. > > > I meant tools/scripts/patchcheck.py, not the pre-commit hook. The check would inform (especially for old files) or remind (for new files) so that judgment could be applied. Ah, right. Yeah, that may be reasonable. A warning option on reindent.py may be a place to start if someone wanted to implement it. Whether or not patchcheck used that option would likely depend on the initial results of running it manually :) Cheers, Nick. > > _______________________________________________ > Python-checkins mailing list > Python-checkins at python.org > http://mail.python.org/mailman/listinfo/python-checkins -------------- next part -------------- An HTML attachment was scrubbed... 
URL: From solipsis at pitrou.net Sat Aug 3 05:49:56 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 03 Aug 2013 05:49:56 +0200 Subject: [Python-checkins] Daily reference leaks (e3e28ee5b5c3): sum=0 Message-ID: results for e3e28ee5b5c3 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog7DmNVa', '-x'] From python-checkins at python.org Sat Aug 3 11:52:47 2013 From: python-checkins at python.org (larry.hastings) Date: Sat, 3 Aug 2013 11:52:47 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fix_minor_documentation_ma?= =?utf-8?q?rkup_error=2E?= Message-ID: <3c6gXH1Nkkz7LjN@mail.python.org> http://hg.python.org/cpython/rev/cda76ef561a2 changeset: 84982:cda76ef561a2 user: Larry Hastings date: Sat Aug 03 02:49:53 2013 -0700 summary: Fix minor documentation markup error. files: Doc/library/ssl.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -859,7 +859,7 @@ does not contain certificates from *capath* unless a certificate was requested and loaded by a SSL connection. - ..versionadded:: 3.4 + .. versionadded:: 3.4 .. method:: SSLContext.set_default_verify_paths() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 13:08:36 2013 From: python-checkins at python.org (mark.dickinson) Date: Sat, 3 Aug 2013 13:08:36 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Minor_consistency_fixes_fo?= =?utf-8?q?r_some_longobject=2Ec_exception_messages=3A?= Message-ID: <3c6jCm1Z7GzSpk@mail.python.org> http://hg.python.org/cpython/rev/dab7d6f33b87 changeset: 84983:dab7d6f33b87 user: Mark Dickinson date: Sat Aug 03 12:08:22 2013 +0100 summary: Minor consistency fixes for some longobject.c exception messages: - replace 'long int' / 'long' by 'int' - fix capitalization of "Python" in PyLong_AsUnsignedLong - "is too large" -> "too large", for consistency with other messages. files: Objects/longobject.c | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Objects/longobject.c b/Objects/longobject.c --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -549,7 +549,7 @@ x = (x << PyLong_SHIFT) | v->ob_digit[i]; if ((x >> PyLong_SHIFT) != prev) { PyErr_SetString(PyExc_OverflowError, - "python int too large to convert " + "Python int too large to convert " "to C unsigned long"); return (unsigned long) -1; } @@ -1602,7 +1602,7 @@ */ if (size_a > PY_SSIZE_T_MAX / PyLong_SHIFT) { PyErr_SetString(PyExc_OverflowError, - "long is too large to format"); + "int too large to format"); return -1; } /* the expression size_a * PyLong_SHIFT is now safe from overflow */ @@ -1785,7 +1785,7 @@ /* Ensure overflow doesn't occur during computation of sz. 
*/ if (size_a > (PY_SSIZE_T_MAX - 3) / PyLong_SHIFT) { PyErr_SetString(PyExc_OverflowError, - "int is too large to format"); + "int too large to format"); return -1; } size_a_in_bits = (size_a - 1) * PyLong_SHIFT + @@ -2658,7 +2658,7 @@ x = _PyLong_Frexp((PyLongObject *)v, &exponent); if ((x == -1.0 && PyErr_Occurred()) || exponent > DBL_MAX_EXP) { PyErr_SetString(PyExc_OverflowError, - "long int too large to convert to float"); + "int too large to convert to float"); return -1.0; } return ldexp(x, (int)exponent); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 15:03:41 2013 From: python-checkins at python.org (nick.coghlan) Date: Sat, 3 Aug 2013 15:03:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogQ2xvc2UgIzE4Mzk2?= =?utf-8?q?=3A_fix_spurious_test=5Fsignal_failure_on_Windows?= Message-ID: <3c6lmY6Sk9z7Ljc@mail.python.org> http://hg.python.org/cpython/rev/b7834800562f changeset: 84984:b7834800562f branch: 3.3 parent: 84980:f37b336bfbca user: Nick Coghlan date: Sat Aug 03 22:56:30 2013 +1000 summary: Close #18396: fix spurious test_signal failure on Windows signal.getsignal returns None for some signals if faulthandler is enabled (Patch by Jeremy Kloth) files: Lib/test/test_signal.py | 10 ++++++++-- Misc/NEWS | 3 +++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -209,11 +209,17 @@ def test_issue9324(self): # Updated for issue #10003, adding SIGBREAK handler = lambda x, y: None + checked = set() for sig in (signal.SIGABRT, signal.SIGBREAK, signal.SIGFPE, signal.SIGILL, signal.SIGINT, signal.SIGSEGV, signal.SIGTERM): - # Set and then reset a handler for signals that work on windows - signal.signal(sig, signal.signal(sig, handler)) + # Set and then reset a handler for signals that work on windows. + # Issue #18396, only for signals without a C-level handler. + if signal.getsignal(sig) is not None: + signal.signal(sig, signal.signal(sig, handler)) + checked.add(sig) + # Issue #18396: Ensure the above loop at least tested *something* + self.assertTrue(checked) with self.assertRaises(ValueError): signal.signal(-1, handler) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -230,6 +230,9 @@ Tests ----- +- Issue #18396: Fix spurious test failure in test_signal on Windows when + faulthandler is enabled (Patch by Jeremy Kloth) + - Issue #17046: Fix broken test_executable_without_cwd in test_subprocess. 
- Issue #15415: Add new temp_dir() and change_cwd() context managers to -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 15:03:43 2013 From: python-checkins at python.org (nick.coghlan) Date: Sat, 3 Aug 2013 15:03:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_=2318396_from_3=2E3?= Message-ID: <3c6lmb1Tmhz7Ljg@mail.python.org> http://hg.python.org/cpython/rev/6fc71ed6a910 changeset: 84985:6fc71ed6a910 parent: 84983:dab7d6f33b87 parent: 84984:b7834800562f user: Nick Coghlan date: Sat Aug 03 23:03:27 2013 +1000 summary: Merge #18396 from 3.3 files: Lib/test/test_signal.py | 10 ++++++++-- Misc/NEWS | 3 +++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -206,11 +206,17 @@ def test_issue9324(self): # Updated for issue #10003, adding SIGBREAK handler = lambda x, y: None + checked = set() for sig in (signal.SIGABRT, signal.SIGBREAK, signal.SIGFPE, signal.SIGILL, signal.SIGINT, signal.SIGSEGV, signal.SIGTERM): - # Set and then reset a handler for signals that work on windows - signal.signal(sig, signal.signal(sig, handler)) + # Set and then reset a handler for signals that work on windows. + # Issue #18396, only for signals without a C-level handler. + if signal.getsignal(sig) is not None: + signal.signal(sig, signal.signal(sig, handler)) + checked.add(sig) + # Issue #18396: Ensure the above loop at least tested *something* + self.assertTrue(checked) with self.assertRaises(ValueError): signal.signal(-1, handler) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -599,6 +599,9 @@ Tests ----- +- Issue #18396: Fix spurious test failure in test_signal on Windows when + faulthandler is enabled (Patch by Jeremy Kloth) + - Issue #17046: Fix broken test_executable_without_cwd in test_subprocess. - Issue #15415: Add new temp_dir() and change_cwd() context managers to -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 15:43:07 2013 From: python-checkins at python.org (nick.coghlan) Date: Sat, 3 Aug 2013 15:43:07 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PJE=27s_feedback_on_PEP_426_e?= =?utf-8?q?xports?= Message-ID: <3c6mf35RZvz7Lkh@mail.python.org> http://hg.python.org/peps/rev/59bc3da3abc8 changeset: 5029:59bc3da3abc8 user: Nick Coghlan date: Sat Aug 03 23:42:56 2013 +1000 summary: PJE's feedback on PEP 426 exports files: pep-0426.txt | 45 +++++++++++++++---------- pep-0426/pydist-schema.json | 23 ++++++++---- 2 files changed, 42 insertions(+), 26 deletions(-) diff --git a/pep-0426.txt b/pep-0426.txt --- a/pep-0426.txt +++ b/pep-0426.txt @@ -1454,10 +1454,6 @@ * ``requires_extra``: indicates the export will only work correctly if the additional dependencies named in the given extra are available. -Note that installation of extras is not tracked directly: they are merely -a convenient way to refer to a set of dependencies that will be checked for -at runtime. - .. note:: I tried this as a mapping with subfields, and it made the examples below @@ -1520,19 +1516,21 @@ * ``prebuilt``: scripts created by the distribution's build process and installed directly to the configured scripts directory -``wrap_console`` and ``wrap_gui`` are both mappings of relatively arbitrary -script names to export specifiers. The script names must follow the rules -for distribution names. 
The export specifiers must refer to -either a package with a __main__ submodule (if no ``name`` subfield is -given in the export specifier) or else to a callable inside the named -module. +``wrap_console`` and ``wrap_gui`` are both mappings of script names to +export specifiers. The script names must follow the same naming rules as +distribution names. + +The export specifiers for wrapper scripts must refer to either a package +with a __main__ submodule (if no ``name`` subfield is given in the export +specifier) or else to a callable inside the named module. Installation tools should generate appropriate wrappers as part of the installation process. .. note:: - Still needs more detail on what "appropriate wrapper" means. + Still needs more detail on what "appropriate wrapper" means. For now, + refer to what setuptools and zc.buildout generate as wrapper scripts. ``prebuilt`` is a list of script paths, relative to the scripts directory in a wheel file or following installation. They are provided for informational @@ -1553,17 +1551,22 @@ Exports ------- -The ``exports`` mapping contains relatively arbitrary subfields, each -defining an export group. Each export group is then a mapping of relatively -arbitrary subfields to export specifiers. +The ``exports`` field is a mapping containing dotted names as keys. Each +key defines an export group. Export group names SHOULD correspond to +module names in the distribution that defines the meaning +of the export group. + +Each export group is then a mapping of arbitrary non-empty string keys +to export specifiers. The interpretation of the individual export keys is +defined by the distribution that defines the export group. Both export group names and export names must follow the rules for distribution identifiers. It is suggested that export groups be named after distributions to help avoid name conflicts. -The meaning of exports within an export group is up to those defining the -export group. One common use case is to advertise plugins for use by other -software. +The meaning of exports within an export group is up to the distribution +that defines the export group. One common use case is to allow other +distributions to advertise plugins for use by the defining distribution. Install hooks ============= @@ -1703,13 +1706,19 @@ Extras are additional dependencies that enable an optional aspect -of the distribution, generally corresponding to a ``try: import +of the distribution, often corresponding to a ``try: import optional_dependency ...`` block in the code. To support the use of the distribution with or without the optional dependencies they are listed separately from the distribution's core dependencies and must be requested explicitly, either in the dependency specifications of another distribution, or else when issuing a command to an installation tool. +Note that installation of extras is not tracked directly by installation +tools: extras are merely a convenient way to indicate a set of dependencies +that is needed to provide some optional functionality of the distribution. +If selective *installation* of components is desired, then multiple +distributions must be defined rather than relying on the extras system. + The names of extras MUST abide by the same restrictions as those for distribution names.
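
The "appropriate wrapper" that the note above leaves undefined can be pictured concretely. As a rough sketch only (not taken from the PEP, setuptools, or zc.buildout; the entry ``{"chairctl": "chair:run_cli"}``, the scripts directory, and every other name below are invented for the example), an installer handling a ``wrap_console`` entry might generate a console wrapper along these lines::

    # Sketch of one way an installer could turn a "wrap_console" entry into a
    # console wrapper script; the package-with-__main__ form of specifier is
    # deliberately not handled here.
    import os
    import stat

    _TEMPLATE = """\
#!{python}
import sys
from {module} import {func}

if __name__ == "__main__":
    sys.exit({func}())
"""

    def write_console_wrapper(script_name, specifier, scripts_dir,
                              python="/usr/bin/python3"):
        """Generate a wrapper for a "module:callable" export specifier."""
        module, sep, func = specifier.partition(":")
        if not sep:
            # A bare specifier names a package with a __main__ submodule; a
            # real tool would emit a runpy-based wrapper for that case.
            raise ValueError("only module:callable specifiers are handled here")
        path = os.path.join(scripts_dir, script_name)
        with open(path, "w", encoding="utf-8") as f:
            f.write(_TEMPLATE.format(python=python, module=module, func=func))
        os.chmod(path, os.stat(path).st_mode |
                 stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

``wrap_gui`` entries would be handled the same way, just pointing the shebang at a ``pythonw``-style launcher on platforms that distinguish console and GUI interpreters.
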
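The reworked ``exports`` description can likewise be made concrete. A distribution defining an export group might discover plugins advertised by other installed distributions roughly as follows; this is only a sketch, since the draft metadata does not pin down an installed-metadata lookup API. The group name ``examplepkg.plugins`` and the directory-of-``pydist.json``-files layout are invented for the example, and any extras qualifier a full export specifier may carry is ignored::

    # Rough sketch: resolve every export specifier registered under one
    # export group across a directory of per-distribution pydist.json files.
    import importlib
    import json
    import pathlib

    def load_plugins(metadata_dir, group="examplepkg.plugins"):
        plugins = {}
        for path in pathlib.Path(metadata_dir).glob("*/pydist.json"):
            with path.open(encoding="utf-8") as f:
                metadata = json.load(f)
            exports = metadata.get("exports", {})
            for name, specifier in exports.get(group, {}).items():
                module_name, _, qualname = specifier.partition(":")
                obj = importlib.import_module(module_name)
                # "module:name" exports the named attribute; a bare module
                # specifier exports the imported module itself.
                for attr in filter(None, qualname.split(".")):
                    obj = getattr(obj, attr)
                plugins[name] = obj
        return plugins

Naming the group after a module in the defining distribution, as the new wording recommends, is what keeps unrelated plugin ecosystems from colliding inside this flat mapping.
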
diff --git a/pep-0426/pydist-schema.json b/pep-0426/pydist-schema.json --- a/pep-0426/pydist-schema.json +++ b/pep-0426/pydist-schema.json @@ -17,7 +17,7 @@ "name": { "description": "The name of the distribution.", "type": "string", - "$ref": "#/definitions/valid_name" + "$ref": "#/definitions/distribution_name" }, "version": { "description": "The distribution's public version identifier", @@ -252,11 +252,11 @@ "properties": { "wrap_console": { "type": "object", - "$ref": "#/definitions/export_map" + "$ref": "#/definitions/command_map" }, "wrap_gui": { "type": "object", - "$ref": "#/definitions/export_map" + "$ref": "#/definitions/command_map" }, "prebuilt": { "type": "array", @@ -271,22 +271,29 @@ "exports": { "type": "object", "patternProperties": { - "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": { + "^[A-Za-z]([0-9A-Za-z_])*([.][A-Za-z]([0-9A-Za-z_])*)*$": { "type": "object", - "$ref": "#/definitions/export_map" + "patternProperties": { + ".": { + "type": "string", + "$ref": "#/definitions/export_specifier" + } + }, + "additionalProperties": false } }, "additionalProperties": false }, - "export_map": { + "command_map": { "type": "object", "patternProperties": { "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": { "type": "string", "$ref": "#/definitions/export_specifier" - } }, - "valid_name": { + "additionalProperties": false + }, + "distribution_name": { "type": "string", "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" }, -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 3 16:19:08 2013 From: python-checkins at python.org (matthias.klose) Date: Sat, 3 Aug 2013 16:19:08 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_-_Fix_a_fcntl_?= =?utf-8?q?test_case_on_KFreeBSD=2C_Debian_=23708653_=28Petr_Salinger=29?= =?utf-8?q?=2E?= Message-ID: <3c6nRc517Lz7Ljc@mail.python.org> http://hg.python.org/cpython/rev/c503cea0e8c2 changeset: 84986:c503cea0e8c2 branch: 3.3 parent: 84984:b7834800562f user: doko at ubuntu.com date: Sat Aug 03 16:12:33 2013 +0200 summary: - Fix a fcntl test case on KFreeBSD, Debian #708653 (Petr Salinger). files: Lib/test/test_fcntl.py | 2 ++ Misc/NEWS | 2 ++ 2 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py --- a/Lib/test/test_fcntl.py +++ b/Lib/test/test_fcntl.py @@ -35,6 +35,8 @@ pid_t = 'l' lockdata = struct.pack(off_t + off_t + pid_t + 'hh', 0, 0, 0, fcntl.F_WRLCK, 0) + elif sys.platform.startswith('gnukfreebsd'): + lockdata = struct.pack('qqihhi', 0, 0, 0, fcntl.F_WRLCK, 0, 0) elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']: lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) elif sys.platform in ['os2emx']: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -230,6 +230,8 @@ Tests ----- +- Fix a fcntl test case on KFreeBSD, Debian #708653 (Petr Salinger). 
+ - Issue #18396: Fix spurious test failure in test_signal on Windows when faulthandler is enabled (Patch by Jeremy Kloth) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 16:19:10 2013 From: python-checkins at python.org (matthias.klose) Date: Sat, 3 Aug 2013 16:19:10 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_-_Fix_a_fcntl_test_case_on_KFreeBSD=2C_Debian_=23708653_?= =?utf-8?q?=28Petr_Salinger=29=2E?= Message-ID: <3c6nRf01qLz7LkD@mail.python.org> http://hg.python.org/cpython/rev/9cc42abeae06 changeset: 84987:9cc42abeae06 parent: 84985:6fc71ed6a910 parent: 84986:c503cea0e8c2 user: doko at ubuntu.com date: Sat Aug 03 16:18:55 2013 +0200 summary: - Fix a fcntl test case on KFreeBSD, Debian #708653 (Petr Salinger). files: Lib/test/test_fcntl.py | 2 ++ Misc/NEWS | 2 ++ 2 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py --- a/Lib/test/test_fcntl.py +++ b/Lib/test/test_fcntl.py @@ -32,6 +32,8 @@ pid_t = 'l' lockdata = struct.pack(off_t + off_t + pid_t + 'hh', 0, 0, 0, fcntl.F_WRLCK, 0) + elif sys.platform.startswith('gnukfreebsd'): + lockdata = struct.pack('qqihhi', 0, 0, 0, fcntl.F_WRLCK, 0, 0) elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']: lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) else: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -599,6 +599,8 @@ Tests ----- +- Fix a fcntl test case on KFreeBSD, Debian #708653 (Petr Salinger). + - Issue #18396: Fix spurious test failure in test_signal on Windows when faulthandler is enabled (Patch by Jeremy Kloth) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 18:15:19 2013 From: python-checkins at python.org (mark.dickinson) Date: Sat, 3 Aug 2013 18:15:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Remove_debugging_print=2E?= Message-ID: <3c6r1g14BLzS6T@mail.python.org> http://hg.python.org/cpython/rev/d86aec3f61b0 changeset: 84988:d86aec3f61b0 user: Mark Dickinson date: Sat Aug 03 17:14:50 2013 +0100 summary: Remove debugging print. files: Lib/test/test_wave.py | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_wave.py b/Lib/test/test_wave.py --- a/Lib/test/test_wave.py +++ b/Lib/test/test_wave.py @@ -75,7 +75,6 @@ with self.assertRaises(wave.Error): with wave.open(TESTFN, 'wb') as f: pass - print('in test:', f._file) with self.assertRaises(wave.Error): with open(TESTFN, 'wb') as testfile: with wave.open(testfile): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 18:31:37 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 18:31:37 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3OTk4?= =?utf-8?q?=3A_Fix_an_internal_error_in_regular_expression_engine=2E?= Message-ID: <3c6rNT6B68zPmC@mail.python.org> http://hg.python.org/cpython/rev/86b8b035529b changeset: 84989:86b8b035529b branch: 3.3 parent: 84986:c503cea0e8c2 user: Serhiy Storchaka date: Sat Aug 03 19:18:38 2013 +0300 summary: Issue #17998: Fix an internal error in regular expression engine. 
files: Lib/test/test_re.py | 10 ++++++++++ Misc/NEWS | 2 ++ Modules/_sre.c | 12 ++++++------ Modules/sre.h | 2 +- 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -1040,6 +1040,16 @@ with self.assertRaisesRegex(sre_constants.error, '\?foo'): re.compile('(?P)') + def test_issue17998(self): + for reps in '*', '+', '?', '{1}': + for mod in '', '?': + pattern = '.' + reps + mod + 'yz' + self.assertEqual(re.compile(pattern, re.S).findall('xyz'), + ['xyz'], msg=pattern) + pattern = pattern.encode() + self.assertEqual(re.compile(pattern, re.S).findall(b'xyz'), + [b'xyz'], msg=pattern) + def run_re_tests(): from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -59,6 +59,8 @@ Library ------- +- Issue #17998: Fix an internal error in regular expression engine. + - Issue #17557: Fix os.getgroups() to work with the modified behavior of getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. diff --git a/Modules/_sre.c b/Modules/_sre.c --- a/Modules/_sre.c +++ b/Modules/_sre.c @@ -997,7 +997,7 @@ TRACE(("|%p|%p|REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, ctx->pattern[1], ctx->pattern[2])); - if (ctx->pattern[1] > (end - ctx->ptr) / state->charsize) + if ((Py_ssize_t) ctx->pattern[1] > (end - ctx->ptr) / state->charsize) RETURN_FAILURE; /* cannot match */ state->ptr = ctx->ptr; @@ -1081,7 +1081,7 @@ TRACE(("|%p|%p|MIN_REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, ctx->pattern[1], ctx->pattern[2])); - if (ctx->pattern[1] > (end - ctx->ptr) / state->charsize) + if ((Py_ssize_t) ctx->pattern[1] > (end - ctx->ptr) / state->charsize) RETURN_FAILURE; /* cannot match */ state->ptr = ctx->ptr; @@ -1180,7 +1180,7 @@ TRACE(("|%p|%p|MAX_UNTIL %d\n", ctx->pattern, ctx->ptr, ctx->count)); - if (ctx->count < ctx->u.rep->pattern[1]) { + if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { /* not enough matches */ ctx->u.rep->count = ctx->count; DO_JUMP(JUMP_MAX_UNTIL_1, jump_max_until_1, @@ -1194,7 +1194,7 @@ RETURN_FAILURE; } - if ((ctx->count < ctx->u.rep->pattern[2] || + if ((ctx->count < (Py_ssize_t) ctx->u.rep->pattern[2] || ctx->u.rep->pattern[2] == SRE_MAXREPEAT) && state->ptr != ctx->u.rep->last_ptr) { /* we may have enough matches, but if we can @@ -1243,7 +1243,7 @@ TRACE(("|%p|%p|MIN_UNTIL %d %p\n", ctx->pattern, ctx->ptr, ctx->count, ctx->u.rep->pattern)); - if (ctx->count < ctx->u.rep->pattern[1]) { + if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { /* not enough matches */ ctx->u.rep->count = ctx->count; DO_JUMP(JUMP_MIN_UNTIL_1, jump_min_until_1, @@ -1272,7 +1272,7 @@ LASTMARK_RESTORE(); - if ((ctx->count >= ctx->u.rep->pattern[2] + if ((ctx->count >= (Py_ssize_t) ctx->u.rep->pattern[2] && ctx->u.rep->pattern[2] != SRE_MAXREPEAT) || state->ptr == ctx->u.rep->last_ptr) RETURN_FAILURE; diff --git a/Modules/sre.h b/Modules/sre.h --- a/Modules/sre.h +++ b/Modules/sre.h @@ -19,7 +19,7 @@ #if SIZEOF_SIZE_T > 4 # define SRE_MAXREPEAT (~(SRE_CODE)0) #else -# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) +# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX) #endif typedef struct { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 18:31:39 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 18:31:39 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= 
=?utf-8?q?=29=3A_Issue_=2317998=3A_Fix_an_internal_error_in_regular_expre?= =?utf-8?q?ssion_engine=2E?= Message-ID: <3c6rNW2Q55zPmC@mail.python.org> http://hg.python.org/cpython/rev/36702442ffe0 changeset: 84990:36702442ffe0 parent: 84987:9cc42abeae06 parent: 84989:86b8b035529b user: Serhiy Storchaka date: Sat Aug 03 19:22:28 2013 +0300 summary: Issue #17998: Fix an internal error in regular expression engine. files: Lib/test/test_re.py | 10 ++++++++++ Misc/NEWS | 2 ++ Modules/_sre.c | 12 ++++++------ Modules/sre.h | 2 +- 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -1040,6 +1040,16 @@ with self.assertRaisesRegex(sre_constants.error, '\?foo'): re.compile('(?P)') + def test_issue17998(self): + for reps in '*', '+', '?', '{1}': + for mod in '', '?': + pattern = '.' + reps + mod + 'yz' + self.assertEqual(re.compile(pattern, re.S).findall('xyz'), + ['xyz'], msg=pattern) + pattern = pattern.encode() + self.assertEqual(re.compile(pattern, re.S).findall(b'xyz'), + [b'xyz'], msg=pattern) + def run_re_tests(): from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -181,6 +181,8 @@ Library ------- +- Issue #17998: Fix an internal error in regular expression engine. + - Issue #17557: Fix os.getgroups() to work with the modified behavior of getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. diff --git a/Modules/_sre.c b/Modules/_sre.c --- a/Modules/_sre.c +++ b/Modules/_sre.c @@ -997,7 +997,7 @@ TRACE(("|%p|%p|REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, ctx->pattern[1], ctx->pattern[2])); - if (ctx->pattern[1] > (end - ctx->ptr) / state->charsize) + if ((Py_ssize_t) ctx->pattern[1] > (end - ctx->ptr) / state->charsize) RETURN_FAILURE; /* cannot match */ state->ptr = ctx->ptr; @@ -1081,7 +1081,7 @@ TRACE(("|%p|%p|MIN_REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, ctx->pattern[1], ctx->pattern[2])); - if (ctx->pattern[1] > (end - ctx->ptr) / state->charsize) + if ((Py_ssize_t) ctx->pattern[1] > (end - ctx->ptr) / state->charsize) RETURN_FAILURE; /* cannot match */ state->ptr = ctx->ptr; @@ -1180,7 +1180,7 @@ TRACE(("|%p|%p|MAX_UNTIL %d\n", ctx->pattern, ctx->ptr, ctx->count)); - if (ctx->count < ctx->u.rep->pattern[1]) { + if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { /* not enough matches */ ctx->u.rep->count = ctx->count; DO_JUMP(JUMP_MAX_UNTIL_1, jump_max_until_1, @@ -1194,7 +1194,7 @@ RETURN_FAILURE; } - if ((ctx->count < ctx->u.rep->pattern[2] || + if ((ctx->count < (Py_ssize_t) ctx->u.rep->pattern[2] || ctx->u.rep->pattern[2] == SRE_MAXREPEAT) && state->ptr != ctx->u.rep->last_ptr) { /* we may have enough matches, but if we can @@ -1243,7 +1243,7 @@ TRACE(("|%p|%p|MIN_UNTIL %d %p\n", ctx->pattern, ctx->ptr, ctx->count, ctx->u.rep->pattern)); - if (ctx->count < ctx->u.rep->pattern[1]) { + if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { /* not enough matches */ ctx->u.rep->count = ctx->count; DO_JUMP(JUMP_MIN_UNTIL_1, jump_min_until_1, @@ -1272,7 +1272,7 @@ LASTMARK_RESTORE(); - if ((ctx->count >= ctx->u.rep->pattern[2] + if ((ctx->count >= (Py_ssize_t) ctx->u.rep->pattern[2] && ctx->u.rep->pattern[2] != SRE_MAXREPEAT) || state->ptr == ctx->u.rep->last_ptr) RETURN_FAILURE; diff --git a/Modules/sre.h b/Modules/sre.h --- a/Modules/sre.h +++ b/Modules/sre.h @@ -19,7 +19,7 @@ #if SIZEOF_SIZE_T > 4 # define SRE_MAXREPEAT (~(SRE_CODE)0) #else -# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX 
+ 1u) +# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX) #endif typedef struct { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 18:31:40 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 18:31:40 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE3OTk4?= =?utf-8?q?=3A_Fix_an_internal_error_in_regular_expression_engine=2E?= Message-ID: <3c6rNX5gbQzPln@mail.python.org> http://hg.python.org/cpython/rev/e5e425fd1e4f changeset: 84991:e5e425fd1e4f branch: 2.7 parent: 84975:8205e72b5cfc user: Serhiy Storchaka date: Sat Aug 03 19:26:33 2013 +0300 summary: Issue #17998: Fix an internal error in regular expression engine. files: Lib/test/test_re.py | 10 ++++++++++ Misc/NEWS | 2 ++ Modules/_sre.c | 12 ++++++------ Modules/sre.h | 4 ++-- 4 files changed, 20 insertions(+), 8 deletions(-) diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -897,6 +897,16 @@ with self.assertRaisesRegexp(sre_constants.error, '\?foo'): re.compile('(?P)') + def test_issue17998(self): + for reps in '*', '+', '?', '{1}': + for mod in '', '?': + pattern = '.' + reps + mod + 'yz' + self.assertEqual(re.compile(pattern, re.S).findall('xyz'), + ['xyz'], msg=pattern) + pattern = pattern.encode() + self.assertEqual(re.compile(pattern, re.S).findall(b'xyz'), + [b'xyz'], msg=pattern) + def run_re_tests(): from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,6 +26,8 @@ Library ------- +- Issue #17998: Fix an internal error in regular expression engine. + - Issue #17557: Fix os.getgroups() to work with the modified behavior of getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. 
diff --git a/Modules/_sre.c b/Modules/_sre.c --- a/Modules/_sre.c +++ b/Modules/_sre.c @@ -1028,7 +1028,7 @@ TRACE(("|%p|%p|REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, ctx->pattern[1], ctx->pattern[2])); - if (ctx->pattern[1] > end - ctx->ptr) + if ((Py_ssize_t) ctx->pattern[1] > end - ctx->ptr) RETURN_FAILURE; /* cannot match */ state->ptr = ctx->ptr; @@ -1111,7 +1111,7 @@ TRACE(("|%p|%p|MIN_REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, ctx->pattern[1], ctx->pattern[2])); - if (ctx->pattern[1] > end - ctx->ptr) + if ((Py_ssize_t) ctx->pattern[1] > end - ctx->ptr) RETURN_FAILURE; /* cannot match */ state->ptr = ctx->ptr; @@ -1210,7 +1210,7 @@ TRACE(("|%p|%p|MAX_UNTIL %d\n", ctx->pattern, ctx->ptr, ctx->count)); - if (ctx->count < ctx->u.rep->pattern[1]) { + if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { /* not enough matches */ ctx->u.rep->count = ctx->count; DO_JUMP(JUMP_MAX_UNTIL_1, jump_max_until_1, @@ -1224,7 +1224,7 @@ RETURN_FAILURE; } - if ((ctx->count < ctx->u.rep->pattern[2] || + if ((ctx->count < (Py_ssize_t) ctx->u.rep->pattern[2] || ctx->u.rep->pattern[2] == SRE_MAXREPEAT) && state->ptr != ctx->u.rep->last_ptr) { /* we may have enough matches, but if we can @@ -1273,7 +1273,7 @@ TRACE(("|%p|%p|MIN_UNTIL %d %p\n", ctx->pattern, ctx->ptr, ctx->count, ctx->u.rep->pattern)); - if (ctx->count < ctx->u.rep->pattern[1]) { + if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { /* not enough matches */ ctx->u.rep->count = ctx->count; DO_JUMP(JUMP_MIN_UNTIL_1, jump_min_until_1, @@ -1302,7 +1302,7 @@ LASTMARK_RESTORE(); - if ((ctx->count >= ctx->u.rep->pattern[2] + if ((ctx->count >= (Py_ssize_t) ctx->u.rep->pattern[2] && ctx->u.rep->pattern[2] != SRE_MAXREPEAT) || state->ptr == ctx->u.rep->last_ptr) RETURN_FAILURE; diff --git a/Modules/sre.h b/Modules/sre.h --- a/Modules/sre.h +++ b/Modules/sre.h @@ -20,14 +20,14 @@ # if SIZEOF_SIZE_T > 4 # define SRE_MAXREPEAT (~(SRE_CODE)0) # else -# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) +# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX) # endif #else # define SRE_CODE unsigned int # if SIZEOF_SIZE_T > SIZEOF_INT # define SRE_MAXREPEAT (~(SRE_CODE)0) # else -# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) +# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX) # endif #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 18:31:42 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 18:31:42 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?q?=29=3A_Merge_heads?= Message-ID: <3c6rNZ0N02zSdn@mail.python.org> http://hg.python.org/cpython/rev/ab1859ba1a78 changeset: 84992:ab1859ba1a78 parent: 84990:36702442ffe0 parent: 84988:d86aec3f61b0 user: Serhiy Storchaka date: Sat Aug 03 19:28:28 2013 +0300 summary: Merge heads files: Lib/test/test_wave.py | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_wave.py b/Lib/test/test_wave.py --- a/Lib/test/test_wave.py +++ b/Lib/test/test_wave.py @@ -75,7 +75,6 @@ with self.assertRaises(wave.Error): with wave.open(TESTFN, 'wb') as f: pass - print('in test:', f._file) with self.assertRaises(wave.Error): with open(TESTFN, 'wb') as testfile: with wave.open(testfile): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 20:13:39 2013 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 3 Aug 2013 20:13:39 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE2MDY3?= 
=?utf-8?q?=3A_Add_description_into_MSI_file_to_replace_installer=27s_temp?= =?utf-8?q?orary?= Message-ID: <3c6tfC5F3qzR1T@mail.python.org> http://hg.python.org/cpython/rev/797b1d13d16e changeset: 84993:797b1d13d16e branch: 3.3 parent: 84989:86b8b035529b user: Martin v. L?wis date: Sat Aug 03 20:09:42 2013 +0200 summary: Issue #16067: Add description into MSI file to replace installer's temporary name. files: Misc/NEWS | 2 ++ Tools/msi/msi.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -304,6 +304,8 @@ Build ----- +- Issue #16067: Add description into MSI file to replace installer's temporary name. + - Issue #18256: Compilation fix for recent AIX releases. Patch by David Edelsohn. diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -1410,7 +1410,10 @@ # certname (from config.py) should be (a substring of) # the certificate subject, e.g. "Python Software Foundation" if certname: - os.system('signtool sign /n "%s" /t http://timestamp.verisign.com/scripts/timestamp.dll %s' % (certname, msiname)) + os.system('signtool sign /n "%s" ' + '/t http://timestamp.verisign.com/scripts/timestamp.dll ' + '/d "Python %s" ' + '%s' % (certname, full_current_version, msiname)) if pdbzip: build_pdbzip() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 20:13:41 2013 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 3 Aug 2013 20:13:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2316067=3A_Merge_with_3=2E3?= Message-ID: <3c6tfF09SrzRN6@mail.python.org> http://hg.python.org/cpython/rev/7d661f47f73b changeset: 84994:7d661f47f73b parent: 84992:ab1859ba1a78 parent: 84993:797b1d13d16e user: Martin v. L?wis date: Sat Aug 03 20:12:45 2013 +0200 summary: Issue #16067: Merge with 3.3 files: Misc/NEWS | 3 +++ Tools/msi/msi.py | 5 ++++- 2 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -763,6 +763,9 @@ Build ----- +- Issue #16067: Add description into MSI file to replace installer's + temporary name. + - Issue #18257: Fix readlink usage in python-config. Install the python version again on Darwin. diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -1415,7 +1415,10 @@ # certname (from config.py) should be (a substring of) # the certificate subject, e.g. "Python Software Foundation" if certname: - os.system('signtool sign /n "%s" /t http://timestamp.verisign.com/scripts/timestamp.dll %s' % (certname, msiname)) + os.system('signtool sign /n "%s" ' + '/t http://timestamp.verisign.com/scripts/timestamp.dll ' + '/d "Python %s" ' + '%s' % (certname, full_current_version, msiname)) if pdbzip: build_pdbzip() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 20:20:48 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 20:20:48 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE2NzQx?= =?utf-8?q?=3A_Fix_an_error_reporting_in_int=28=29=2E?= Message-ID: <3c6tpS0yX1zRNx@mail.python.org> http://hg.python.org/cpython/rev/ecc8512b427d changeset: 84995:ecc8512b427d branch: 3.3 parent: 84989:86b8b035529b user: Serhiy Storchaka date: Sat Aug 03 20:55:06 2013 +0300 summary: Issue #16741: Fix an error reporting in int(). 
files: Include/longobject.h | 1 + Lib/test/test_int.py | 47 ++++++++++----- Misc/NEWS | 2 + Objects/abstract.c | 29 +-------- Objects/longobject.c | 93 ++++++++++++++++++++++--------- 5 files changed, 103 insertions(+), 69 deletions(-) diff --git a/Include/longobject.h b/Include/longobject.h --- a/Include/longobject.h +++ b/Include/longobject.h @@ -84,6 +84,7 @@ #ifndef Py_LIMITED_API PyAPI_FUNC(PyObject *) PyLong_FromUnicode(Py_UNICODE*, Py_ssize_t, int); PyAPI_FUNC(PyObject *) PyLong_FromUnicodeObject(PyObject *u, int base); +PyAPI_FUNC(PyObject *) _PyLong_FromBytes(const char *, Py_ssize_t, int); #endif #ifndef Py_LIMITED_API diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -73,14 +73,6 @@ x = -1-sys.maxsize self.assertEqual(x >> 1, x//2) - self.assertRaises(ValueError, int, '123\0') - self.assertRaises(ValueError, int, '53', 40) - - # SF bug 1545497: embedded NULs were not detected with - # explicit base - self.assertRaises(ValueError, int, '123\0', 10) - self.assertRaises(ValueError, int, '123\x00 245', 20) - x = int('1' * 600) self.assertIsInstance(x, int) @@ -360,14 +352,37 @@ int(TruncReturnsBadInt()) def test_error_message(self): - testlist = ('\xbd', '123\xbd', ' 123 456 ') - for s in testlist: - try: - int(s) - except ValueError as e: - self.assertIn(s.strip(), e.args[0]) - else: - self.fail("Expected int(%r) to raise a ValueError", s) + def check(s, base=None): + with self.assertRaises(ValueError, + msg="int(%r, %r)" % (s, base)) as cm: + if base is None: + int(s) + else: + int(s, base) + self.assertEqual(cm.exception.args[0], + "invalid literal for int() with base %d: %r" % + (10 if base is None else base, s)) + + check('\xbd') + check('123\xbd') + check(' 123 456 ') + + check('123\x00') + # SF bug 1545497: embedded NULs were not detected with explicit base + check('123\x00', 10) + check('123\x00 245', 20) + check('123\x00 245', 16) + check('123\x00245', 20) + check('123\x00245', 16) + # byte string with embedded NUL + check(b'123\x00') + check(b'123\x00', 10) + # non-UTF-8 byte string + check(b'123\xbd') + check(b'123\xbd', 10) + # lone surrogate in Unicode string + check('123\ud800') + check('123\ud800', 10) def test_main(): support.run_unittest(IntTestCases) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -12,6 +12,8 @@ Core and Builtins ----------------- +- Issue #16741: Fix an error reporting in int(). + - Issue #17899: Fix rare file descriptor leak in os.listdir(). - Issue #18552: Check return value of PyArena_AddPyObject() in diff --git a/Objects/abstract.c b/Objects/abstract.c --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -1240,25 +1240,6 @@ } -/* Add a check for embedded NULL-bytes in the argument. */ -static PyObject * -long_from_string(const char *s, Py_ssize_t len) -{ - char *end; - PyObject *x; - - x = PyLong_FromString((char*)s, &end, 10); - if (x == NULL) - return NULL; - if (end != s + len) { - PyErr_SetString(PyExc_ValueError, - "null byte in argument for int()"); - Py_DECREF(x); - return NULL; - } - return x; -} - PyObject * PyNumber_Long(PyObject *o) { @@ -1306,16 +1287,16 @@ if (PyBytes_Check(o)) /* need to do extra error checking that PyLong_FromString() - * doesn't do. In particular int('9.5') must raise an - * exception, not truncate the float. + * doesn't do. In particular int('9\x005') must raise an + * exception, not truncate at the null. 
*/ - return long_from_string(PyBytes_AS_STRING(o), - PyBytes_GET_SIZE(o)); + return _PyLong_FromBytes(PyBytes_AS_STRING(o), + PyBytes_GET_SIZE(o), 10); if (PyUnicode_Check(o)) /* The above check is done in PyLong_FromUnicode(). */ return PyLong_FromUnicodeObject(o, 10); if (!PyObject_AsCharBuffer(o, &buffer, &buffer_len)) - return long_from_string(buffer, buffer_len); + return _PyLong_FromBytes(buffer, buffer_len, 10); return type_error("int() argument must be a string or a " "number, not '%.200s'", o); diff --git a/Objects/longobject.c b/Objects/longobject.c --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -2005,6 +2005,14 @@ return long_normalize(z); } +/* Parses a long from a bytestring. Leading and trailing whitespace will be + * ignored. + * + * If successful, a PyLong object will be returned and 'pend' will be pointing + * to the first unused byte unless it's NULL. + * + * If unsuccessful, NULL will be returned. + */ PyObject * PyLong_FromString(char *str, char **pend, int base) { @@ -2267,12 +2275,17 @@ str++; if (*str != '\0') goto onError; - if (pend) + long_normalize(z); + z = maybe_small_long(z); + if (z == NULL) + return NULL; + if (pend != NULL) *pend = str; - long_normalize(z); - return (PyObject *) maybe_small_long(z); + return (PyObject *) z; onError: + if (pend != NULL) + *pend = str; Py_XDECREF(z); slen = strlen(orig_str) < 200 ? strlen(orig_str) : 200; strobj = PyUnicode_FromStringAndSize(orig_str, slen); @@ -2285,6 +2298,31 @@ return NULL; } +/* Since PyLong_FromString doesn't have a length parameter, + * check here for possible NULs in the string. + * + * Reports an invalid literal as a bytes object. + */ +PyObject * +_PyLong_FromBytes(const char *s, Py_ssize_t len, int base) +{ + PyObject *result, *strobj; + char *end = NULL; + + result = PyLong_FromString((char*)s, &end, base); + if (end == NULL || (result != NULL && end == s + len)) + return result; + Py_XDECREF(result); + strobj = PyBytes_FromStringAndSize(s, Py_MIN(len, 200)); + if (strobj != NULL) { + PyErr_Format(PyExc_ValueError, + "invalid literal for int() with base %d: %R", + base, strobj); + Py_DECREF(strobj); + } + return NULL; +} + PyObject * PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base) { @@ -2299,9 +2337,8 @@ PyObject * PyLong_FromUnicodeObject(PyObject *u, int base) { - PyObject *result; - PyObject *asciidig; - char *buffer, *end; + PyObject *result, *asciidig, *strobj; + char *buffer, *end = NULL; Py_ssize_t buflen; asciidig = _PyUnicode_TransformDecimalAndSpaceToASCII(u); @@ -2310,17 +2347,26 @@ buffer = PyUnicode_AsUTF8AndSize(asciidig, &buflen); if (buffer == NULL) { Py_DECREF(asciidig); - return NULL; - } - result = PyLong_FromString(buffer, &end, base); - if (result != NULL && end != buffer + buflen) { - PyErr_SetString(PyExc_ValueError, - "null byte in argument for int()"); - Py_DECREF(result); - result = NULL; - } - Py_DECREF(asciidig); - return result; + if (!PyErr_ExceptionMatches(PyExc_UnicodeEncodeError)) + return NULL; + } + else { + result = PyLong_FromString(buffer, &end, base); + if (end == NULL || (result != NULL && end == buffer + buflen)) { + Py_DECREF(asciidig); + return result; + } + Py_DECREF(asciidig); + Py_XDECREF(result); + } + strobj = PySequence_GetSlice(u, 0, 200); + if (strobj != NULL) { + PyErr_Format(PyExc_ValueError, + "invalid literal for int() with base %d: %R", + base, strobj); + Py_DECREF(strobj); + } + return NULL; } /* forward */ @@ -4308,23 +4354,12 @@ if (PyUnicode_Check(x)) return PyLong_FromUnicodeObject(x, (int)base); else if 
(PyByteArray_Check(x) || PyBytes_Check(x)) { - /* Since PyLong_FromString doesn't have a length parameter, - * check here for possible NULs in the string. */ char *string; - Py_ssize_t size = Py_SIZE(x); if (PyByteArray_Check(x)) string = PyByteArray_AS_STRING(x); else string = PyBytes_AS_STRING(x); - if (strlen(string) != (size_t)size || !size) { - /* We only see this if there's a null byte in x or x is empty, - x is a bytes or buffer, *and* a base is given. */ - PyErr_Format(PyExc_ValueError, - "invalid literal for int() with base %d: %R", - (int)base, x); - return NULL; - } - return PyLong_FromString(string, NULL, (int)base); + return _PyLong_FromBytes(string, Py_SIZE(x), (int)base); } else { PyErr_SetString(PyExc_TypeError, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 20:20:49 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 20:20:49 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2316741=3A_Fix_an_error_reporting_in_int=28=29=2E?= Message-ID: <3c6tpT4CGfzRky@mail.python.org> http://hg.python.org/cpython/rev/4fd48a807812 changeset: 84996:4fd48a807812 parent: 84992:ab1859ba1a78 parent: 84995:ecc8512b427d user: Serhiy Storchaka date: Sat Aug 03 21:14:05 2013 +0300 summary: Issue #16741: Fix an error reporting in int(). files: Include/longobject.h | 1 + Lib/test/test_int.py | 47 ++++++++++----- Misc/NEWS | 2 + Objects/abstract.c | 29 +-------- Objects/longobject.c | 91 +++++++++++++++++++++---------- 5 files changed, 100 insertions(+), 70 deletions(-) diff --git a/Include/longobject.h b/Include/longobject.h --- a/Include/longobject.h +++ b/Include/longobject.h @@ -97,6 +97,7 @@ #ifndef Py_LIMITED_API PyAPI_FUNC(PyObject *) PyLong_FromUnicode(Py_UNICODE*, Py_ssize_t, int); PyAPI_FUNC(PyObject *) PyLong_FromUnicodeObject(PyObject *u, int base); +PyAPI_FUNC(PyObject *) _PyLong_FromBytes(const char *, Py_ssize_t, int); #endif #ifndef Py_LIMITED_API diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -73,14 +73,6 @@ x = -1-sys.maxsize self.assertEqual(x >> 1, x//2) - self.assertRaises(ValueError, int, '123\0') - self.assertRaises(ValueError, int, '53', 40) - - # SF bug 1545497: embedded NULs were not detected with - # explicit base - self.assertRaises(ValueError, int, '123\0', 10) - self.assertRaises(ValueError, int, '123\x00 245', 20) - x = int('1' * 600) self.assertIsInstance(x, int) @@ -401,14 +393,37 @@ int(TruncReturnsBadInt()) def test_error_message(self): - testlist = ('\xbd', '123\xbd', ' 123 456 ') - for s in testlist: - try: - int(s) - except ValueError as e: - self.assertIn(s.strip(), e.args[0]) - else: - self.fail("Expected int(%r) to raise a ValueError", s) + def check(s, base=None): + with self.assertRaises(ValueError, + msg="int(%r, %r)" % (s, base)) as cm: + if base is None: + int(s) + else: + int(s, base) + self.assertEqual(cm.exception.args[0], + "invalid literal for int() with base %d: %r" % + (10 if base is None else base, s)) + + check('\xbd') + check('123\xbd') + check(' 123 456 ') + + check('123\x00') + # SF bug 1545497: embedded NULs were not detected with explicit base + check('123\x00', 10) + check('123\x00 245', 20) + check('123\x00 245', 16) + check('123\x00245', 20) + check('123\x00245', 16) + # byte string with embedded NUL + check(b'123\x00') + check(b'123\x00', 10) + # non-UTF-8 byte string + check(b'123\xbd') + check(b'123\xbd', 10) + # lone surrogate in 
Unicode string + check('123\ud800') + check('123\ud800', 10) def test_main(): support.run_unittest(IntTestCases) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #16741: Fix an error reporting in int(). + - Issue #17899: Fix rare file descriptor leak in os.listdir(). - Issue #9035: ismount now recognises volumes mounted below a drive root diff --git a/Objects/abstract.c b/Objects/abstract.c --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -1261,25 +1261,6 @@ } -/* Add a check for embedded NULL-bytes in the argument. */ -static PyObject * -long_from_string(const char *s, Py_ssize_t len) -{ - char *end; - PyObject *x; - - x = PyLong_FromString((char*)s, &end, 10); - if (x == NULL) - return NULL; - if (end != s + len) { - PyErr_SetString(PyExc_ValueError, - "null byte in argument for int()"); - Py_DECREF(x); - return NULL; - } - return x; -} - PyObject * PyNumber_Long(PyObject *o) { @@ -1327,16 +1308,16 @@ if (PyBytes_Check(o)) /* need to do extra error checking that PyLong_FromString() - * doesn't do. In particular int('9.5') must raise an - * exception, not truncate the float. + * doesn't do. In particular int('9\x005') must raise an + * exception, not truncate at the null. */ - return long_from_string(PyBytes_AS_STRING(o), - PyBytes_GET_SIZE(o)); + return _PyLong_FromBytes(PyBytes_AS_STRING(o), + PyBytes_GET_SIZE(o), 10); if (PyUnicode_Check(o)) /* The above check is done in PyLong_FromUnicode(). */ return PyLong_FromUnicodeObject(o, 10); if (!PyObject_AsCharBuffer(o, &buffer, &buffer_len)) - return long_from_string(buffer, buffer_len); + return _PyLong_FromBytes(buffer, buffer_len, 10); return type_error("int() argument must be a string or a " "number, not '%.200s'", o); diff --git a/Objects/longobject.c b/Objects/longobject.c --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -2000,6 +2000,14 @@ return long_normalize(z); } +/* Parses a long from a bytestring. Leading and trailing whitespace will be + * ignored. + * + * If successful, a PyLong object will be returned and 'pend' will be pointing + * to the first unused byte unless it's NULL. + * + * If unsuccessful, NULL will be returned. + */ PyObject * PyLong_FromString(char *str, char **pend, int base) { @@ -2262,24 +2270,54 @@ str++; if (*str != '\0') goto onError; - if (pend) + long_normalize(z); + z = maybe_small_long(z); + if (z == NULL) + return NULL; + if (pend != NULL) *pend = str; - long_normalize(z); - return (PyObject *) maybe_small_long(z); + return (PyObject *) z; onError: + if (pend != NULL) + *pend = str; Py_XDECREF(z); slen = strlen(orig_str) < 200 ? strlen(orig_str) : 200; strobj = PyUnicode_FromStringAndSize(orig_str, slen); if (strobj == NULL) return NULL; PyErr_Format(PyExc_ValueError, - "invalid literal for int() with base %d: %R", + "invalid literal for int() with base %d: %.200R", base, strobj); Py_DECREF(strobj); return NULL; } +/* Since PyLong_FromString doesn't have a length parameter, + * check here for possible NULs in the string. + * + * Reports an invalid literal as a bytes object. 
+ */ +PyObject * +_PyLong_FromBytes(const char *s, Py_ssize_t len, int base) +{ + PyObject *result, *strobj; + char *end = NULL; + + result = PyLong_FromString((char*)s, &end, base); + if (end == NULL || (result != NULL && end == s + len)) + return result; + Py_XDECREF(result); + strobj = PyBytes_FromStringAndSize(s, Py_MIN(len, 200)); + if (strobj != NULL) { + PyErr_Format(PyExc_ValueError, + "invalid literal for int() with base %d: %.200R", + base, strobj); + Py_DECREF(strobj); + } + return NULL; +} + PyObject * PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base) { @@ -2294,9 +2332,8 @@ PyObject * PyLong_FromUnicodeObject(PyObject *u, int base) { - PyObject *result; - PyObject *asciidig; - char *buffer, *end; + PyObject *result, *asciidig; + char *buffer, *end = NULL; Py_ssize_t buflen; asciidig = _PyUnicode_TransformDecimalAndSpaceToASCII(u); @@ -2305,17 +2342,22 @@ buffer = PyUnicode_AsUTF8AndSize(asciidig, &buflen); if (buffer == NULL) { Py_DECREF(asciidig); - return NULL; - } - result = PyLong_FromString(buffer, &end, base); - if (result != NULL && end != buffer + buflen) { - PyErr_SetString(PyExc_ValueError, - "null byte in argument for int()"); - Py_DECREF(result); - result = NULL; - } - Py_DECREF(asciidig); - return result; + if (!PyErr_ExceptionMatches(PyExc_UnicodeEncodeError)) + return NULL; + } + else { + result = PyLong_FromString(buffer, &end, base); + if (end == NULL || (result != NULL && end == buffer + buflen)) { + Py_DECREF(asciidig); + return result; + } + Py_DECREF(asciidig); + Py_XDECREF(result); + } + PyErr_Format(PyExc_ValueError, + "invalid literal for int() with base %d: %.200R", + base, u); + return NULL; } /* forward */ @@ -4319,23 +4361,12 @@ if (PyUnicode_Check(x)) return PyLong_FromUnicodeObject(x, (int)base); else if (PyByteArray_Check(x) || PyBytes_Check(x)) { - /* Since PyLong_FromString doesn't have a length parameter, - * check here for possible NULs in the string. */ char *string; - Py_ssize_t size = Py_SIZE(x); if (PyByteArray_Check(x)) string = PyByteArray_AS_STRING(x); else string = PyBytes_AS_STRING(x); - if (strlen(string) != (size_t)size || !size) { - /* We only see this if there's a null byte in x or x is empty, - x is a bytes or buffer, *and* a base is given. */ - PyErr_Format(PyExc_ValueError, - "invalid literal for int() with base %d: %R", - (int)base, x); - return NULL; - } - return PyLong_FromString(string, NULL, (int)base); + return _PyLong_FromBytes(string, Py_SIZE(x), (int)base); } else { PyErr_SetString(PyExc_TypeError, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 20:20:50 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 20:20:50 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMy4zIC0+IDMuMyk6?= =?utf-8?q?_Merge_heads?= Message-ID: <3c6tpV6BzgzRYw@mail.python.org> http://hg.python.org/cpython/rev/6793ce8e803d changeset: 84997:6793ce8e803d branch: 3.3 parent: 84995:ecc8512b427d parent: 84993:797b1d13d16e user: Serhiy Storchaka date: Sat Aug 03 21:17:04 2013 +0300 summary: Merge heads files: Misc/NEWS | 2 ++ Tools/msi/msi.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -306,6 +306,8 @@ Build ----- +- Issue #16067: Add description into MSI file to replace installer's temporary name. + - Issue #18256: Compilation fix for recent AIX releases. Patch by David Edelsohn. 
diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -1410,7 +1410,10 @@ # certname (from config.py) should be (a substring of) # the certificate subject, e.g. "Python Software Foundation" if certname: - os.system('signtool sign /n "%s" /t http://timestamp.verisign.com/scripts/timestamp.dll %s' % (certname, msiname)) + os.system('signtool sign /n "%s" ' + '/t http://timestamp.verisign.com/scripts/timestamp.dll ' + '/d "Python %s" ' + '%s' % (certname, full_current_version, msiname)) if pdbzip: build_pdbzip() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 20:20:52 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 20:20:52 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?q?=29=3A_Merge_heads?= Message-ID: <3c6tpX18t7zSHC@mail.python.org> http://hg.python.org/cpython/rev/673ef3f96919 changeset: 84998:673ef3f96919 parent: 84996:4fd48a807812 parent: 84994:7d661f47f73b user: Serhiy Storchaka date: Sat Aug 03 21:17:27 2013 +0300 summary: Merge heads files: Misc/NEWS | 3 +++ Tools/msi/msi.py | 5 ++++- 2 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -765,6 +765,9 @@ Build ----- +- Issue #16067: Add description into MSI file to replace installer's + temporary name. + - Issue #18257: Fix readlink usage in python-config. Install the python version again on Darwin. diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -1415,7 +1415,10 @@ # certname (from config.py) should be (a substring of) # the certificate subject, e.g. "Python Software Foundation" if certname: - os.system('signtool sign /n "%s" /t http://timestamp.verisign.com/scripts/timestamp.dll %s' % (certname, msiname)) + os.system('signtool sign /n "%s" ' + '/t http://timestamp.verisign.com/scripts/timestamp.dll ' + '/d "Python %s" ' + '%s' % (certname, full_current_version, msiname)) if pdbzip: build_pdbzip() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 20:20:53 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 20:20:53 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Null_merge?= Message-ID: <3c6tpY338szSBS@mail.python.org> http://hg.python.org/cpython/rev/e9cecb612ff7 changeset: 84999:e9cecb612ff7 parent: 84998:673ef3f96919 parent: 84997:6793ce8e803d user: Serhiy Storchaka date: Sat Aug 03 21:19:10 2013 +0300 summary: Null merge files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 20:24:42 2013 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 3 Aug 2013 20:24:42 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE2MDY3?= =?utf-8?q?=3A_Add_description_into_MSI_file_to_replace_installer=27s_temp?= =?utf-8?q?orary?= Message-ID: <3c6tty44gszSHC@mail.python.org> http://hg.python.org/cpython/rev/ee0bdc007a0f changeset: 85000:ee0bdc007a0f branch: 2.7 parent: 84991:e5e425fd1e4f user: Martin v. Löwis date: Sat Aug 03 20:24:00 2013 +0200 summary: Issue #16067: Add description into MSI file to replace installer's temporary name. 
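The new /d switch attaches a description ("Python <version>") to the signature, which is what replaces the installer's temporary name mentioned in the summary. A small sketch of how the split string literals in Tools/msi/msi.py concatenate into one signtool command line; the certificate name, version and MSI file name below are made-up placeholders:

    # Adjacent string literals concatenate, so this builds a single command.
    certname = "Python Software Foundation"   # placeholder for the config.py value
    full_current_version = "2.7.6"            # placeholder version string
    msiname = "python-2.7.6.msi"              # placeholder output file name

    command = ('signtool sign /n "%s" '
               '/t http://timestamp.verisign.com/scripts/timestamp.dll '
               '/d "Python %s" '
               '%s' % (certname, full_current_version, msiname))
    print(command)
    # signtool sign /n "Python Software Foundation"
    #     /t http://timestamp.verisign.com/scripts/timestamp.dll
    #     /d "Python 2.7.6" python-2.7.6.msi    (printed as one line)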
files: Misc/NEWS | 2 ++ Tools/msi/msi.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -98,6 +98,8 @@ Build ----- +- Issue #16067: Add description into MSI file to replace installer's temporary name. + - Issue #18256: Compilation fix for recent AIX releases. Patch by David Edelsohn. diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -1392,7 +1392,10 @@ # certname (from config.py) should be (a substring of) # the certificate subject, e.g. "Python Software Foundation" if certname: - os.system('signtool sign /n "%s" /t http://timestamp.verisign.com/scripts/timestamp.dll %s' % (certname, msiname)) + os.system('signtool sign /n "%s" ' + '/t http://timestamp.verisign.com/scripts/timestamp.dll ' + '/d "Python %s" ' + '%s' % (certname, full_current_version, msiname)) if pdbzip: build_pdbzip() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 22:48:45 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 22:48:45 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NjQ3?= =?utf-8?q?=3A_Temporary_disable_the_=22nothing_to_repeat=22_check_to_make?= =?utf-8?q?_buildbots?= Message-ID: <3c6y592Gh5zRky@mail.python.org> http://hg.python.org/cpython/rev/c243896e12be changeset: 85001:c243896e12be branch: 3.3 parent: 84997:6793ce8e803d user: Serhiy Storchaka date: Sat Aug 03 23:46:19 2013 +0300 summary: Issue #18647: Temporary disable the "nothing to repeat" check to make buildbots happy. files: Lib/sre_compile.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py --- a/Lib/sre_compile.py +++ b/Lib/sre_compile.py @@ -358,8 +358,8 @@ def _simple(av): # check if av is a "simple" operator lo, hi = av[2].getwidth() - if lo == 0 and hi == MAXREPEAT: - raise error("nothing to repeat") + #if lo == 0 and hi == MAXREPEAT: + # raise error("nothing to repeat") return lo == hi == 1 and av[2][0][0] != SUBPATTERN def _compile_info(code, pattern, flags): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 3 22:48:46 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 3 Aug 2013 22:48:46 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318647=3A_Temporary_disable_the_=22nothing_to_re?= =?utf-8?q?peat=22_check_to_make_buildbots?= Message-ID: <3c6y5B4CH3zSTC@mail.python.org> http://hg.python.org/cpython/rev/4faf9b73c3df changeset: 85002:4faf9b73c3df parent: 84999:e9cecb612ff7 parent: 85001:c243896e12be user: Serhiy Storchaka date: Sat Aug 03 23:47:48 2013 +0300 summary: Issue #18647: Temporary disable the "nothing to repeat" check to make buildbots happy. 
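The check being commented out lives in sre_compile._simple(), which decides whether the operand of a repeat is "simple" (matches exactly one character and is not a group). While active, it also rejected at compile time any repeat whose operand could itself match anywhere from zero to MAXREPEAT characters; disabling it lets such patterns compile again until the underlying issue is sorted out. A standalone sketch of that logic, with the operand's getwidth() result passed in as plain numbers instead of a real parse tree (names and values here are illustrative only):

    MAXREPEAT = 2 ** 32 - 1   # stand-in for the sre MAXREPEAT sentinel

    def is_simple(lo, hi, is_subpattern, check_enabled=True):
        # Same shape as sre_compile._simple(); the "nothing to repeat" guard
        # is the part this changeset comments out.
        if check_enabled and lo == 0 and hi == MAXREPEAT:
            raise ValueError("nothing to repeat")
        return lo == hi == 1 and not is_subpattern

    print(is_simple(1, 1, False))                                # True
    print(is_simple(0, MAXREPEAT, False, check_enabled=False))   # False, no error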
files: Lib/sre_compile.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py --- a/Lib/sre_compile.py +++ b/Lib/sre_compile.py @@ -351,8 +351,8 @@ def _simple(av): # check if av is a "simple" operator lo, hi = av[2].getwidth() - if lo == 0 and hi == MAXREPEAT: - raise error("nothing to repeat") + #if lo == 0 and hi == MAXREPEAT: + # raise error("nothing to repeat") return lo == hi == 1 and av[2][0][0] != SUBPATTERN def _compile_info(code, pattern, flags): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 00:06:08 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sun, 4 Aug 2013 00:06:08 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE2NzQx?= =?utf-8?q?=3A_Remove_testing_of_implementation_artifact=2E?= Message-ID: <3c6zpS02XQzSdn@mail.python.org> http://hg.python.org/cpython/rev/7b023134ad83 changeset: 85003:7b023134ad83 branch: 3.3 parent: 85001:c243896e12be user: Serhiy Storchaka date: Sun Aug 04 01:04:15 2013 +0300 summary: Issue #16741: Remove testing of implementation artifact. files: Lib/test/test_unicode.py | 2 -- 1 files changed, 0 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -1718,8 +1718,6 @@ self.assertRaises(TypeError, "hello".encode, 42, 42, 42) # Error handling (lone surrogate in PyUnicode_TransformDecimalToASCII()) - self.assertRaises(UnicodeError, int, "\ud800") - self.assertRaises(UnicodeError, int, "\udf00") self.assertRaises(UnicodeError, float, "\ud800") self.assertRaises(UnicodeError, float, "\udf00") self.assertRaises(UnicodeError, complex, "\ud800") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 00:06:09 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sun, 4 Aug 2013 00:06:09 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2316741=3A_Remove_testing_of_implementation_artif?= =?utf-8?q?act=2E?= Message-ID: <3c6zpT1yt5zSg2@mail.python.org> http://hg.python.org/cpython/rev/1b4772ab420f changeset: 85004:1b4772ab420f parent: 85002:4faf9b73c3df parent: 85003:7b023134ad83 user: Serhiy Storchaka date: Sun Aug 04 01:05:02 2013 +0300 summary: Issue #16741: Remove testing of implementation artifact. 
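The assertions removed here relied on int() raising UnicodeError for a lone surrogate, which after the #16741 change above is no longer the case: the surrogate is now reported through the ordinary "invalid literal" ValueError (the equivalent coverage moved into test_int), while float() and complex() keep their UnicodeError behaviour, as the surviving assertions show. A quick illustration of the distinction, assuming a build that includes the #16741 change:

    # int() now treats a lone surrogate as just another invalid literal ...
    try:
        int("\ud800")
    except ValueError as exc:
        print(type(exc).__name__, exc)   # ValueError invalid literal for int() ...

    # ... while float() still fails while encoding its argument.
    try:
        float("\ud800")
    except UnicodeError as exc:
        print(type(exc).__name__)        # a UnicodeError subclass (encode error)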
files: Lib/test/test_unicode.py | 2 -- 1 files changed, 0 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -1735,8 +1735,6 @@ self.assertRaises(TypeError, "hello".encode, 42, 42, 42) # Error handling (lone surrogate in PyUnicode_TransformDecimalToASCII()) - self.assertRaises(UnicodeError, int, "\ud800") - self.assertRaises(UnicodeError, int, "\udf00") self.assertRaises(UnicodeError, float, "\ud800") self.assertRaises(UnicodeError, float, "\udf00") self.assertRaises(UnicodeError, complex, "\ud800") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 02:49:01 2013 From: python-checkins at python.org (eli.bendersky) Date: Sun, 4 Aug 2013 02:49:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3MDEx?= =?utf-8?q?=3A_Fix_caching_of_xpath_path_when_namespaces_are_present=2E?= Message-ID: <3c73QP6q7gzS0x@mail.python.org> http://hg.python.org/cpython/rev/854ded9135c2 changeset: 85005:854ded9135c2 branch: 3.3 parent: 85003:7b023134ad83 user: Eli Bendersky date: Sat Aug 03 17:47:47 2013 -0700 summary: Issue #17011: Fix caching of xpath path when namespaces are present. Thanks to Stefan Behnel for the report and proposed solution & test. files: Lib/test/test_xml_etree.py | 14 ++++++++++++++ Lib/xml/etree/ElementPath.py | 6 ++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -1679,6 +1679,20 @@ summarize_list(e.findall(".//{http://effbot.org/ns}tag")), ['{http://effbot.org/ns}tag'] * 3) + def test_findall_different_nsmaps(self): + root = ET.XML(''' + + + + + ''') + nsmap = {'xx': 'X'} + self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 2) + self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 2) + nsmap = {'xx': 'Y'} + self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 1) + self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 2) + def test_bad_find(self): e = ET.XML(SAMPLE_XML) with self.assertRaisesRegex(SyntaxError, 'cannot use absolute path'): diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py --- a/Lib/xml/etree/ElementPath.py +++ b/Lib/xml/etree/ElementPath.py @@ -246,10 +246,12 @@ def iterfind(elem, path, namespaces=None): # compile selector pattern + cache_key = (path, None if namespaces is None + else tuple(sorted(namespaces.items()))) if path[-1:] == "/": path = path + "*" # implicit all (FIXME: keep this?) 
try: - selector = _cache[path] + selector = _cache[cache_key] except KeyError: if len(_cache) > 100: _cache.clear() @@ -269,7 +271,7 @@ token = next() except StopIteration: break - _cache[path] = selector + _cache[cache_key] = selector # execute selector pattern result = [elem] context = _SelectorContext(elem) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 02:49:03 2013 From: python-checkins at python.org (eli.bendersky) Date: Sun, 4 Aug 2013 02:49:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_fix_for_Issue_=2317011_from_3=2E3?= Message-ID: <3c73QR1dCrz7LkC@mail.python.org> http://hg.python.org/cpython/rev/ce0be0d03c0a changeset: 85006:ce0be0d03c0a parent: 85004:1b4772ab420f parent: 85005:854ded9135c2 user: Eli Bendersky date: Sat Aug 03 17:48:41 2013 -0700 summary: Merge fix for Issue #17011 from 3.3 files: Lib/test/test_xml_etree.py | 14 ++++++++++++++ Lib/xml/etree/ElementPath.py | 6 ++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -1839,6 +1839,20 @@ summarize_list(e.findall(".//{http://effbot.org/ns}tag")), ['{http://effbot.org/ns}tag'] * 3) + def test_findall_different_nsmaps(self): + root = ET.XML(''' + + + + + ''') + nsmap = {'xx': 'X'} + self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 2) + self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 2) + nsmap = {'xx': 'Y'} + self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 1) + self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 2) + def test_bad_find(self): e = ET.XML(SAMPLE_XML) with self.assertRaisesRegex(SyntaxError, 'cannot use absolute path'): diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py --- a/Lib/xml/etree/ElementPath.py +++ b/Lib/xml/etree/ElementPath.py @@ -249,10 +249,12 @@ def iterfind(elem, path, namespaces=None): # compile selector pattern + cache_key = (path, None if namespaces is None + else tuple(sorted(namespaces.items()))) if path[-1:] == "/": path = path + "*" # implicit all (FIXME: keep this?) try: - selector = _cache[path] + selector = _cache[cache_key] except KeyError: if len(_cache) > 100: _cache.clear() @@ -272,7 +274,7 @@ token = next() except StopIteration: break - _cache[path] = selector + _cache[cache_key] = selector # execute selector pattern result = [elem] context = _SelectorContext(elem) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 03:55:31 2013 From: python-checkins at python.org (eli.bendersky) Date: Sun, 4 Aug 2013 03:55:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3OTAy?= =?utf-8?q?=3A_Clarify_doc_of_ElementTree=2Eiterparse?= Message-ID: <3c74v71PKfzStl@mail.python.org> http://hg.python.org/cpython/rev/a5a5ba4f71ad changeset: 85007:a5a5ba4f71ad branch: 3.3 parent: 85005:854ded9135c2 user: Eli Bendersky date: Sat Aug 03 18:52:32 2013 -0700 summary: Issue #17902: Clarify doc of ElementTree.iterparse files: Doc/library/xml.etree.elementtree.rst | 3 ++- 1 files changed, 2 insertions(+), 1 deletions(-) diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -384,7 +384,8 @@ and ``"end-ns"`` (the "ns" events are used to get detailed namespace information). 
If *events* is omitted, only ``"end"`` events are reported. *parser* is an optional parser instance. If not given, the standard - :class:`XMLParser` parser is used. Returns an :term:`iterator` providing + :class:`XMLParser` parser is used. *parser* can only use the default + :class:`TreeBuilder` as a target. Returns an :term:`iterator` providing ``(event, elem)`` pairs. .. note:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 03:55:32 2013 From: python-checkins at python.org (eli.bendersky) Date: Sun, 4 Aug 2013 03:55:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2317902=3A_Clarify_doc_of_ElementTree=2Eiterparse?= =?utf-8?q?_and_IncrementalParser?= Message-ID: <3c74v83SzhzPqh@mail.python.org> http://hg.python.org/cpython/rev/96f45011957e changeset: 85008:96f45011957e parent: 85006:ce0be0d03c0a parent: 85007:a5a5ba4f71ad user: Eli Bendersky date: Sat Aug 03 18:55:10 2013 -0700 summary: Issue #17902: Clarify doc of ElementTree.iterparse and IncrementalParser Based on patch by Aaron Oakley files: Doc/library/xml.etree.elementtree.rst | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -416,7 +416,8 @@ and ``"end-ns"`` (the "ns" events are used to get detailed namespace information). If *events* is omitted, only ``"end"`` events are reported. *parser* is an optional parser instance. If not given, the standard - :class:`XMLParser` parser is used. Returns an :term:`iterator` providing + :class:`XMLParser` parser is used. *parser* can only use the default + :class:`TreeBuilder` as a target. Returns an :term:`iterator` providing ``(event, elem)`` pairs. Note that while :func:`iterparse` builds the tree incrementally, it issues @@ -880,7 +881,7 @@ events are used to get detailed namespace information). If *events* is omitted, only ``"end"`` events are reported. *parser* is an optional parser instance. If not given, the standard :class:`XMLParser` parser is - used. + used. *parser* can only use the default :class:`TreeBuilder` as a target. .. 
method:: data_received(data) -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sun Aug 4 05:48:25 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 04 Aug 2013 05:48:25 +0200 Subject: [Python-checkins] Daily reference leaks (ce0be0d03c0a): sum=0 Message-ID: results for ce0be0d03c0a on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/refloge9iHNt', '-x'] From python-checkins at python.org Sun Aug 4 08:29:33 2013 From: python-checkins at python.org (nick.coghlan) Date: Sun, 4 Aug 2013 08:29:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Try_to_clean_up_PEP_426_namin?= =?utf-8?q?g_rules?= Message-ID: <3c7BzK4Tw2zQvS@mail.python.org> http://hg.python.org/peps/rev/e4e32122be07 changeset: 5030:e4e32122be07 user: Nick Coghlan date: Sun Aug 04 16:29:14 2013 +1000 summary: Try to clean up PEP 426 naming rules files: pep-0426.txt | 123 +++++++++++++++++------ pep-0426/pydist-schema.json | 15 +- 2 files changed, 100 insertions(+), 38 deletions(-) diff --git a/pep-0426.txt b/pep-0426.txt --- a/pep-0426.txt +++ b/pep-0426.txt @@ -247,10 +247,17 @@ "Dist" is the preferred abbreviation for "distributions" in the sense defined in this PEP. -"Qualified name" comes from PEP 3155, and refers to the dotted name of an -object relative to its containing module. This is useful for referring -to method definitions on classes, as well as any other attributes of -top level module objects. +"Qualified name" is a dotted Python identifier. For imported modules and +packages, the qualified name is available as the ``__name__`` attribute, +while for functions and classes it is available as the ``__qualname__`` +attribute. + +A "fully qualified name" uniquely locates an object in the Python module +namespace. For imported modules and packages, it is the same as the +qualified name. For other Python objects, the fully qualified name consists +of the qualified name of the containing module or package, a colon (``:``) +and the qualified name of the object relative to the containing module or +package. Integration and deployment of distributions @@ -1440,19 +1447,28 @@ Export specifiers ----------------- -An export specifier is a string using one of the following formats:: +An export specifier is a string consisting of a fully qualified name, as +well as an optional extra name enclosed in square brackets. This gives the +following four possible forms for an export specifier:: module module:name module[requires_extra] module:name[requires_extra] +.. note:: + + The jsonschema file currently restricts qualified names using the + Python 2 ASCII identifier rules. This may need to be reconsidered + given the more relaxed identifier rules in Python 3. + The meaning of the subfields is as follows: * ``module``: the module providing the export * ``name``: if applicable, the qualified name of the export within the module * ``requires_extra``: indicates the export will only work correctly if the - additional dependencies named in the given extra are available. + additional dependencies named in the given extra are available in the + installed environment .. note:: @@ -1465,12 +1481,33 @@ Modules ------- -A list of module names that the distribution provides for import. +A list of qualified names of modules and packages that the distribution +provides for import. + +.. 
note:: + + The jsonschema file currently restricts qualified names using the + Python 2 ASCII identifier rules. This may need to be reconsidered + given the more relaxed identifier rules in Python 3. For names that contain dots, the portion of the name before the final dot MUST appear either in the installed module list or in the namespace package list. +To help avoid name conflicts, it is RECOMMENDED that distributions provide +a single top level module or package that matches the distribution name +(or a lower case equivalent). This requires that the distribution name also +meet the requirements of a Python identifier, which are stricter than +those for distribution names). This practice will also make it easier to +find authoritative sources for modules. + +Index servers SHOULD allow multiple distributions to publish the same +modules, but MAY notify distribution authors of potential conflicts. + +Installation tools SHOULD report an error when asked to install a +distribution that provides a module that is also provided by a different, +previously installed, distribution. + Note that attempting to import some declared modules may result in an exception if the appropriate extras are not installed. @@ -1490,7 +1527,14 @@ Namespaces ---------- -A list of namespace packages that the distribution contributes modules to. +A list of qualified names of namespace packages that the distribution +contributes modules to. + +.. note:: + + The jsonschema file currently restricts qualified names using the + Python 2 ASCII identifier rules. This may need to be reconsidered + given the more relaxed identifier rules in Python 3. On versions of Python prior to Python 3.3 (which provides native namespace package support), installation tools SHOULD emit a suitable ``__init__.py`` @@ -1498,8 +1542,8 @@ provided file. Installation tools SHOULD emit a warning and MAY emit an error if a -distribution declares a namespace package that conflicts the name of an -already installed module or vice-versa. +distribution declares a namespace package that conflicts with the name of +an already installed module or vice-versa. Example:: @@ -1529,7 +1573,7 @@ .. note:: - Still needs more detail on what "appropriate wrapper" means. For now, + Still needs more detail on what "appropriate wrappers" means. For now, refer to what setuptools and zc.buildout generate as wrapper scripts. ``prebuilt`` is a list of script paths, relative to the scripts directory in @@ -1537,6 +1581,12 @@ purpose only - installing them is handled through the normal processes for files created when building a distribution. +Index servers SHOULD allow multiple distributions to publish the same +commands, but MAY notify distribution authors of potential conflicts. + +Installation tools SHOULD report an error when asked to install a +distribution that provides a command that is also provided by a different, +previously installed, distribution. Example:: @@ -1551,22 +1601,28 @@ Exports ------- -The ``exports`` field is a mapping containing dotted names as keys. Each -key defines an export group. Export group names SHOULD correspond to -module names in the distribution responsible that defines the meaning -of the export group. - -Each export group is then a mapping of arbitrary non-empty string keys -to export specifiers. The interpretation of the individual export keys is -defined by the distribution that i - -Both export group names and export names must follow the rules for -distribution identifiers. 
It is suggested that export groups be named -after distributions to help avoid name conflicts. - -The meaning of exports within an export group is up to the distribution -that defines the export group. One common use case is to allow other -distributions to advertise plugins for use by the defining distribution. +The ``exports`` field is a mapping containing qualified names as keys. Each +key identifies an export group containing one or more exports published by +the distribution. + +Export group names are defined by distributions that will then make use of +the published export information in some way. The primary use case is for +distributions that support a plugin model: defining an export group allows +other distributions to indicate which plugins they provide, how they +can be imported and accessed, and which additional dependencies (if any) +are needed for the plugin to work correctly. + +To reduce the chance of name conflicts, export group names SHOULD use a +prefix that corresponds to a module name in the distribution that defines +the meaning of the export group. This practice will also make it easier to +find authoritative documentation for export groups. + +Each individual export group is then a mapping of arbitrary non-empty string +keys to export specifiers. The meaning of export names within an export +group is up to the distribution that defines the export group. Creating an +appropriate definition for the export name format can allow the importing +distribution to determine whether or not an export is relevant without +needing to import every exporting module. Install hooks @@ -1682,17 +1738,20 @@ =================== Extensions to the metadata may be present in a mapping under the -'extensions' key. The keys must meet the same restrictions as -distribution names, while the values may be any type natively supported -in JSON:: +'extensions' key. The keys must be valid qualified names, while +the values may be any type natively supported in JSON:: "extensions" : { "chili" : { "type" : "Poblano", "heat" : "Mild" }, "languages" : [ "French", "Italian", "Hebrew" ] } -To avoid name conflicts, it is RECOMMENDED that distribution names be used -to identify metadata extensions. This practice will also make it easier to +Extension names are defined by distributions that will then make use of +the additional published metadata in some way. + +To reduce the chance of name conflicts, extension names SHOULD use a +prefix that corresponds to a module name in the distribution that defines +the meaning of the extension. This practice will also make it easier to find authoritative documentation for metadata extensions. 
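Both extension names and export group keys are therefore plain qualified names, and export specifiers only add an optional object path and extra marker on top of them, so all of these forms can be validated with a single regular expression; the pydist-schema.json change below records exactly that pattern. A small sketch using the same expression (the example specifiers are invented for illustration):

    import re

    # Same shape as the "export_specifier" pattern in pydist-schema.json:
    # qualified module name, optional ":" + qualified object name,
    # optional "[extra]" suffix.
    EXPORT_SPECIFIER = re.compile(
        r'^([A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)'     # module
        r'(:[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)?'    # :name
        r'(\[[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?\])?$'          # [extra]
    )

    for spec in ('pip', 'pip.commands:InstallCommand',
                 'requests:Session[security]', 'not a specifier!'):
        print(spec, '->', bool(EXPORT_SPECIFIER.match(spec)))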
Metadata extensions allow development tools to record information in the diff --git a/pep-0426/pydist-schema.json b/pep-0426/pydist-schema.json --- a/pep-0426/pydist-schema.json +++ b/pep-0426/pydist-schema.json @@ -141,7 +141,7 @@ "type": "array", "items": { "type": "string", - "$ref": "#/definitions/dotted_name" + "$ref": "#/definitions/qualified_name" } }, "namespaces": { @@ -149,7 +149,7 @@ "type": "array", "items": { "type": "string", - "$ref": "#/definitions/dotted_name" + "$ref": "#/definitions/qualified_name" } }, "commands": { @@ -290,9 +290,10 @@ "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": { "type": "string", "$ref": "#/definitions/export_specifier" + } }, "additionalProperties": false - }, + }, "distribution_name": { "type": "string", "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" @@ -310,17 +311,19 @@ "type": "string" }, "extra_name" : { - "type": "string" + "type": "string", + "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" }, "relative_path" : { "type": "string" }, "export_specifier": { "type": "string", + "pattern": "^([A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)(:[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)?(\\[[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?\\])?$" }, - "dotted_name" : { + "qualified_name" : { "type": "string", - "pattern": "^[A-Za-z]([0-9A-Za-z_])*([.][A-Za-z]([0-9A-Za-z_])*)*$" + "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*$" } } } -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sun Aug 4 08:33:15 2013 From: python-checkins at python.org (larry.hastings) Date: Sun, 4 Aug 2013 08:33:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Updated_pydoc_topics_for_P?= =?utf-8?q?ython_3=2E4=2E0a1_release=2E?= Message-ID: <3c7C3b55bFzSg2@mail.python.org> http://hg.python.org/cpython/rev/17e507b4a456 changeset: 85009:17e507b4a456 parent: 84988:d86aec3f61b0 user: Larry Hastings date: Sat Aug 03 12:47:53 2013 -0700 summary: Updated pydoc topics for Python 3.4.0a1 release. files: Lib/pydoc_data/topics.py | 24 ++++++++++++------------ 1 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Sat Mar 23 15:42:31 2013 +# Autogenerated by Sphinx on Sat Aug 3 12:46:08 2013 topics = {'assert': '\nThe ``assert`` statement\n************************\n\nAssert statements are a convenient way to insert debugging assertions\ninto a program:\n\n assert_stmt ::= "assert" expression ["," expression]\n\nThe simple form, ``assert expression``, is equivalent to\n\n if __debug__:\n if not expression: raise AssertionError\n\nThe extended form, ``assert expression1, expression2``, is equivalent\nto\n\n if __debug__:\n if not expression1: raise AssertionError(expression2)\n\nThese equivalences assume that ``__debug__`` and ``AssertionError``\nrefer to the built-in variables with those names. In the current\nimplementation, the built-in variable ``__debug__`` is ``True`` under\nnormal circumstances, ``False`` when optimization is requested\n(command line option -O). The current code generator emits no code\nfor an assert statement when optimization is requested at compile\ntime. Note that it is unnecessary to include the source code for the\nexpression that failed in the error message; it will be displayed as\npart of the stack trace.\n\nAssignments to ``__debug__`` are illegal. 
The value for the built-in\nvariable is determined when the interpreter starts.\n', 'assignment': '\nAssignment statements\n*********************\n\nAssignment statements are used to (re)bind names to values and to\nmodify attributes or items of mutable objects:\n\n assignment_stmt ::= (target_list "=")+ (expression_list | yield_expression)\n target_list ::= target ("," target)* [","]\n target ::= identifier\n | "(" target_list ")"\n | "[" target_list "]"\n | attributeref\n | subscription\n | slicing\n | "*" target\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn assignment statement evaluates the expression list (remember that\nthis can be a single expression or a comma-separated list, the latter\nyielding a tuple) and assigns the single resulting object to each of\nthe target lists, from left to right.\n\nAssignment is defined recursively depending on the form of the target\n(list). When a target is part of a mutable object (an attribute\nreference, subscription or slicing), the mutable object must\nultimately perform the assignment and decide about its validity, and\nmay raise an exception if the assignment is unacceptable. The rules\nobserved by various types and the exceptions raised are given with the\ndefinition of the object types (see section *The standard type\nhierarchy*).\n\nAssignment of an object to a target list, optionally enclosed in\nparentheses or square brackets, is recursively defined as follows.\n\n* If the target list is a single target: The object is assigned to\n that target.\n\n* If the target list is a comma-separated list of targets: The object\n must be an iterable with the same number of items as there are\n targets in the target list, and the items are assigned, from left to\n right, to the corresponding targets.\n\n * If the target list contains one target prefixed with an asterisk,\n called a "starred" target: The object must be a sequence with at\n least as many items as there are targets in the target list, minus\n one. The first items of the sequence are assigned, from left to\n right, to the targets before the starred target. The final items\n of the sequence are assigned to the targets after the starred\n target. A list of the remaining items in the sequence is then\n assigned to the starred target (the list can be empty).\n\n * Else: The object must be a sequence with the same number of items\n as there are targets in the target list, and the items are\n assigned, from left to right, to the corresponding targets.\n\nAssignment of an object to a single target is recursively defined as\nfollows.\n\n* If the target is an identifier (name):\n\n * If the name does not occur in a ``global`` or ``nonlocal``\n statement in the current code block: the name is bound to the\n object in the current local namespace.\n\n * Otherwise: the name is bound to the object in the global namespace\n or the outer namespace determined by ``nonlocal``, respectively.\n\n The name is rebound if it was already bound. 
This may cause the\n reference count for the object previously bound to the name to reach\n zero, causing the object to be deallocated and its destructor (if it\n has one) to be called.\n\n* If the target is a target list enclosed in parentheses or in square\n brackets: The object must be an iterable with the same number of\n items as there are targets in the target list, and its items are\n assigned, from left to right, to the corresponding targets.\n\n* If the target is an attribute reference: The primary expression in\n the reference is evaluated. It should yield an object with\n assignable attributes; if this is not the case, ``TypeError`` is\n raised. That object is then asked to assign the assigned object to\n the given attribute; if it cannot perform the assignment, it raises\n an exception (usually but not necessarily ``AttributeError``).\n\n Note: If the object is a class instance and the attribute reference\n occurs on both sides of the assignment operator, the RHS expression,\n ``a.x`` can access either an instance attribute or (if no instance\n attribute exists) a class attribute. The LHS target ``a.x`` is\n always set as an instance attribute, creating it if necessary.\n Thus, the two occurrences of ``a.x`` do not necessarily refer to the\n same attribute: if the RHS expression refers to a class attribute,\n the LHS creates a new instance attribute as the target of the\n assignment:\n\n class Cls:\n x = 3 # class variable\n inst = Cls()\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\n\n This description does not necessarily apply to descriptor\n attributes, such as properties created with ``property()``.\n\n* If the target is a subscription: The primary expression in the\n reference is evaluated. It should yield either a mutable sequence\n object (such as a list) or a mapping object (such as a dictionary).\n Next, the subscript expression is evaluated.\n\n If the primary is a mutable sequence object (such as a list), the\n subscript must yield an integer. If it is negative, the sequence\'s\n length is added to it. The resulting value must be a nonnegative\n integer less than the sequence\'s length, and the sequence is asked\n to assign the assigned object to its item with that index. If the\n index is out of range, ``IndexError`` is raised (assignment to a\n subscripted sequence cannot add new items to a list).\n\n If the primary is a mapping object (such as a dictionary), the\n subscript must have a type compatible with the mapping\'s key type,\n and the mapping is then asked to create a key/datum pair which maps\n the subscript to the assigned object. This can either replace an\n existing key/value pair with the same key value, or insert a new\n key/value pair (if no key with the same value existed).\n\n For user-defined objects, the ``__setitem__()`` method is called\n with appropriate arguments.\n\n* If the target is a slicing: The primary expression in the reference\n is evaluated. It should yield a mutable sequence object (such as a\n list). The assigned object should be a sequence object of the same\n type. Next, the lower and upper bound expressions are evaluated,\n insofar they are present; defaults are zero and the sequence\'s\n length. The bounds should evaluate to integers. If either bound is\n negative, the sequence\'s length is added to it. The resulting\n bounds are clipped to lie between zero and the sequence\'s length,\n inclusive. Finally, the sequence object is asked to replace the\n slice with the items of the assigned sequence. 
The length of the\n slice may be different from the length of the assigned sequence,\n thus changing the length of the target sequence, if the object\n allows it.\n\n**CPython implementation detail:** In the current implementation, the\nsyntax for targets is taken to be the same as for expressions, and\ninvalid syntax is rejected during the code generation phase, causing\nless detailed error messages.\n\nWARNING: Although the definition of assignment implies that overlaps\nbetween the left-hand side and the right-hand side are \'safe\' (for\nexample ``a, b = b, a`` swaps two variables), overlaps *within* the\ncollection of assigned-to variables are not safe! For instance, the\nfollowing program prints ``[0, 2]``:\n\n x = [0, 1]\n i = 0\n i, x[i] = 1, 2\n print(x)\n\nSee also:\n\n **PEP 3132** - Extended Iterable Unpacking\n The specification for the ``*target`` feature.\n\n\nAugmented assignment statements\n===============================\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like ``x += 1`` can be rewritten as\n``x = x + 1`` to achieve a similar, but not exactly equal effect. In\nthe augmented version, ``x`` is only evaluated once. Also, when\npossible, the actual operation is performed *in-place*, meaning that\nrather than creating a new object and assigning that to the target,\nthe old object is modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n', - 'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a ``NameError`` exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name in front of the name, with leading underscores removed, and\na single underscore inserted in front of the class name. 
For example,\nthe identifier ``__spam`` occurring in a class named ``Ham`` will be\ntransformed to ``_Ham__spam``. This transformation is independent of\nthe syntactical context in which the identifier is used. If the\ntransformed name is extremely long (longer than 255 characters),\nimplementation defined truncation may happen. If the class name\nconsists only of underscores, no transformation is done.\n', + 'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a ``NameError`` exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name, with leading underscores removed and a single underscore\ninserted, in front of the name. For example, the identifier\n``__spam`` occurring in a class named ``Ham`` will be transformed to\n``_Ham__spam``. This transformation is independent of the syntactical\ncontext in which the identifier is used. If the transformed name is\nextremely long (longer than 255 characters), implementation defined\ntruncation may happen. If the class name consists only of underscores,\nno transformation is done.\n', 'atom-literals': "\nLiterals\n********\n\nPython supports string and bytes literals and various numeric\nliterals:\n\n literal ::= stringliteral | bytesliteral\n | integer | floatnumber | imagnumber\n\nEvaluation of a literal yields an object of the given type (string,\nbytes, integer, floating point number, complex number) with the given\nvalue. The value may be approximated in the case of floating point\nand imaginary (complex) literals. See section *Literals* for details.\n\nAll literals correspond to immutable data types, and hence the\nobject's identity is less important than its value. Multiple\nevaluations of literals with the same value (either the same\noccurrence in the program text or a different occurrence) may obtain\nthe same object or a different object with the same value.\n", 'attribute-access': '\nCustomizing attribute access\n****************************\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. 
Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n ``__getattribute__()`` method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when ``dir()`` is called on the object. A sequence must be\n returned. ``dir()`` converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n========================\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n====================\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. 
If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to an object instance, ``a.x`` is transformed into the\n call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a class, ``A.x`` is transformed into the call:\n ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n=========\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. 
If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n--------------------------\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises ``AttributeError``. If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``int``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n', 'attribute-references': '\nAttribute references\n********************\n\nAn attribute reference is a primary followed by a period and a name:\n\n attributeref ::= primary "." identifier\n\nThe primary must evaluate to an object of a type that supports\nattribute references, which most objects do. This object is then\nasked to produce the attribute whose name is the identifier (which can\nbe customized by overriding the ``__getattr__()`` method). If this\nattribute is not available, the exception ``AttributeError`` is\nraised. Otherwise, the type and value of the object produced is\ndetermined by the object. 
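To make the descriptor protocol above concrete, here is a minimal data descriptor (it defines both ``__get__()`` and ``__set__()``); the names ``Celsius`` and ``Thermometer`` are illustrative only:

    class Celsius:
        def __get__(self, instance, owner):
            if instance is None:
                return self
            return instance._celsius
        def __set__(self, instance, value):
            instance._celsius = float(value)

    class Thermometer:
        temperature = Celsius()     # descriptor lives in the class dictionary

    t = Thermometer()
    t.temperature = "21.5"          # routed through Celsius.__set__
    print(t.temperature)            # 21.5 -- returned by Celsius.__get__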
Multiple evaluations of the same attribute\nreference may yield different objects.\n', @@ -19,11 +19,11 @@ 'calls': '\nCalls\n*****\n\nA call calls a callable object (e.g., a *function*) with a possibly\nempty series of *arguments*:\n\n call ::= primary "(" [argument_list [","] | comprehension] ")"\n argument_list ::= positional_arguments ["," keyword_arguments]\n ["," "*" expression] ["," keyword_arguments]\n ["," "**" expression]\n | keyword_arguments ["," "*" expression]\n ["," keyword_arguments] ["," "**" expression]\n | "*" expression ["," keyword_arguments] ["," "**" expression]\n | "**" expression\n positional_arguments ::= expression ("," expression)*\n keyword_arguments ::= keyword_item ("," keyword_item)*\n keyword_item ::= identifier "=" expression\n\nA trailing comma may be present after the positional and keyword\narguments but does not affect the semantics.\n\nThe primary must evaluate to a callable object (user-defined\nfunctions, built-in functions, methods of built-in objects, class\nobjects, methods of class instances, and all objects having a\n``__call__()`` method are callable). All argument expressions are\nevaluated before the call is attempted. Please refer to section\n*Function definitions* for the syntax of formal *parameter* lists.\n\nIf keyword arguments are present, they are first converted to\npositional arguments, as follows. First, a list of unfilled slots is\ncreated for the formal parameters. If there are N positional\narguments, they are placed in the first N slots. Next, for each\nkeyword argument, the identifier is used to determine the\ncorresponding slot (if the identifier is the same as the first formal\nparameter name, the first slot is used, and so on). If the slot is\nalready filled, a ``TypeError`` exception is raised. Otherwise, the\nvalue of the argument is placed in the slot, filling it (even if the\nexpression is ``None``, it fills the slot). When all arguments have\nbeen processed, the slots that are still unfilled are filled with the\ncorresponding default value from the function definition. (Default\nvalues are calculated, once, when the function is defined; thus, a\nmutable object such as a list or dictionary used as default value will\nbe shared by all calls that don\'t specify an argument value for the\ncorresponding slot; this should usually be avoided.) If there are any\nunfilled slots for which no default value is specified, a\n``TypeError`` exception is raised. Otherwise, the list of filled\nslots is used as the argument list for the call.\n\n**CPython implementation detail:** An implementation may provide\nbuilt-in functions whose positional parameters do not have names, even\nif they are \'named\' for the purpose of documentation, and which\ntherefore cannot be supplied by keyword. 
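For instance, attribute references can be customized with ``__getattr__()`` as noted above; a minimal sketch with illustrative names:

    class Defaults:
        def __getattr__(self, name):
            # only called when normal attribute lookup fails
            return 'missing:' + name

    d = Defaults()
    d.colour = 'red'
    print(d.colour)      # red -- found in the instance dictionary
    print(d.flavour)     # missing:flavour -- produced by __getattr__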
In CPython, this is the case\nfor functions implemented in C that use ``PyArg_ParseTuple()`` to\nparse their arguments.\n\nIf there are more positional arguments than there are formal parameter\nslots, a ``TypeError`` exception is raised, unless a formal parameter\nusing the syntax ``*identifier`` is present; in this case, that formal\nparameter receives a tuple containing the excess positional arguments\n(or an empty tuple if there were no excess positional arguments).\n\nIf any keyword argument does not correspond to a formal parameter\nname, a ``TypeError`` exception is raised, unless a formal parameter\nusing the syntax ``**identifier`` is present; in this case, that\nformal parameter receives a dictionary containing the excess keyword\narguments (using the keywords as keys and the argument values as\ncorresponding values), or a (new) empty dictionary if there were no\nexcess keyword arguments.\n\nIf the syntax ``*expression`` appears in the function call,\n``expression`` must evaluate to an iterable. Elements from this\niterable are treated as if they were additional positional arguments;\nif there are positional arguments *x1*, ..., *xN*, and ``expression``\nevaluates to a sequence *y1*, ..., *yM*, this is equivalent to a call\nwith M+N positional arguments *x1*, ..., *xN*, *y1*, ..., *yM*.\n\nA consequence of this is that although the ``*expression`` syntax may\nappear *after* some keyword arguments, it is processed *before* the\nkeyword arguments (and the ``**expression`` argument, if any -- see\nbelow). So:\n\n >>> def f(a, b):\n ... print(a, b)\n ...\n >>> f(b=1, *(2,))\n 2 1\n >>> f(a=1, *(2,))\n Traceback (most recent call last):\n File "", line 1, in ?\n TypeError: f() got multiple values for keyword argument \'a\'\n >>> f(1, *(2,))\n 1 2\n\nIt is unusual for both keyword arguments and the ``*expression``\nsyntax to be used in the same call, so in practice this confusion does\nnot arise.\n\nIf the syntax ``**expression`` appears in the function call,\n``expression`` must evaluate to a mapping, the contents of which are\ntreated as additional keyword arguments. In the case of a keyword\nappearing in both ``expression`` and as an explicit keyword argument,\na ``TypeError`` exception is raised.\n\nFormal parameters using the syntax ``*identifier`` or ``**identifier``\ncannot be used as positional argument slots or as keyword argument\nnames.\n\nA call always returns some value, possibly ``None``, unless it raises\nan exception. How this value is computed depends on the type of the\ncallable object.\n\nIf it is---\n\na user-defined function:\n The code block for the function is executed, passing it the\n argument list. The first thing the code block will do is bind the\n formal parameters to the arguments; this is described in section\n *Function definitions*. 
When the code block executes a ``return``\n statement, this specifies the return value of the function call.\n\na built-in function or method:\n The result is up to the interpreter; see *Built-in Functions* for\n the descriptions of built-in functions and methods.\n\na class object:\n A new instance of that class is returned.\n\na class instance method:\n The corresponding user-defined function is called, with an argument\n list that is one longer than the argument list of the call: the\n instance becomes the first argument.\n\na class instance:\n The class must define a ``__call__()`` method; the effect is then\n the same as if that method was called.\n', 'class': '\nClass definitions\n*****************\n\nA class definition defines a class object (see section *The standard\ntype hierarchy*):\n\n classdef ::= [decorators] "class" classname [inheritance] ":" suite\n inheritance ::= "(" [parameter_list] ")"\n classname ::= identifier\n\nA class definition is an executable statement. The inheritance list\nusually gives a list of base classes (see *Customizing class creation*\nfor more advanced uses), so each item in the list should evaluate to a\nclass object which allows subclassing. Classes without an inheritance\nlist inherit, by default, from the base class ``object``; hence,\n\n class Foo:\n pass\n\nis equivalent to\n\n class Foo(object):\n pass\n\nThe class\'s suite is then executed in a new execution frame (see\n*Naming and binding*), using a newly created local namespace and the\noriginal global namespace. (Usually, the suite contains mostly\nfunction definitions.) When the class\'s suite finishes execution, its\nexecution frame is discarded but its local namespace is saved. [4] A\nclass object is then created using the inheritance list for the base\nclasses and the saved local namespace for the attribute dictionary.\nThe class name is bound to this class object in the original local\nnamespace.\n\nClass creation can be customized heavily using *metaclasses*.\n\nClasses can also be decorated: just like when decorating functions,\n\n @f1(arg)\n @f2\n class Foo: pass\n\nis equivalent to\n\n class Foo: pass\n Foo = f1(arg)(f2(Foo))\n\nThe evaluation rules for the decorator expressions are the same as for\nfunction decorators. The result must be a class object, which is then\nbound to the class name.\n\n**Programmer\'s note:** Variables defined in the class definition are\nclass attributes; they are shared by instances. Instance attributes\ncan be set in a method with ``self.name = value``. Both class and\ninstance attributes are accessible through the notation\n"``self.name``", and an instance attribute hides a class attribute\nwith the same name when accessed in this way. Class attributes can be\nused as defaults for instance attributes, but using mutable values\nthere can lead to unexpected results. 
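A short sketch of how excess positional and keyword arguments are collected by ``*identifier`` and ``**identifier`` parameters, as described above (the function ``report`` is illustrative only):

    def report(first, *rest, **options):
        return first, rest, options

    print(report(1, 2, 3, verbose=True))
    # (1, (2, 3), {'verbose': True})

    args = (4, 5)
    opts = {'verbose': False}
    print(report(*args, **opts))
    # (4, (5,), {'verbose': False})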
*Descriptors* can be used to\ncreate instance variables with different implementation details.\n\nSee also:\n\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\n Decorators\n\n-[ Footnotes ]-\n\n[1] The exception is propagated to the invocation stack unless there\n is a ``finally`` clause which happens to raise another exception.\n That new exception causes the old one to be lost.\n\n[2] Currently, control "flows off the end" except in the case of an\n exception or the execution of a ``return``, ``continue``, or\n ``break`` statement.\n\n[3] A string literal appearing as the first statement in the function\n body is transformed into the function\'s ``__doc__`` attribute and\n therefore the function\'s *docstring*.\n\n[4] A string literal appearing as the first statement in the class\n body is transformed into the namespace\'s ``__doc__`` item and\n therefore the class\'s *docstring*.\n', 'comparisons': '\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation. Also unlike C, expressions like ``a < b < c`` have the\ninterpretation that is conventional in mathematics:\n\n comparison ::= or_expr ( comp_operator or_expr )*\n comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: ``True`` or ``False``.\n\nComparisons can be chained arbitrarily, e.g., ``x < y <= z`` is\nequivalent to ``x < y and y <= z``, except that ``y`` is evaluated\nonly once (but in both cases ``z`` is not evaluated at all when ``x <\ny`` is found to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then ``a op1 b op2 c ... y\nopN z`` is equivalent to ``a op1 b and b op2 c and ... y opN z``,\nexcept that each expression is evaluated at most once.\n\nNote that ``a op1 b op2 c`` doesn\'t imply any kind of comparison\nbetween *a* and *c*, so that, e.g., ``x < y > z`` is perfectly legal\n(though perhaps not pretty).\n\nThe operators ``<``, ``>``, ``==``, ``>=``, ``<=``, and ``!=`` compare\nthe values of two objects. The objects need not have the same type.\nIf both are numbers, they are converted to a common type. Otherwise,\nthe ``==`` and ``!=`` operators *always* consider objects of different\ntypes to be unequal, while the ``<``, ``>``, ``>=`` and ``<=``\noperators raise a ``TypeError`` when comparing objects of different\ntypes that do not implement these operators for the given pair of\ntypes. You can control comparison behavior of objects of non-built-in\ntypes by defining rich comparison methods like ``__gt__()``, described\nin section *Basic customization*.\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* The values ``float(\'NaN\')`` and ``Decimal(\'NaN\')`` are special. The\n are identical to themselves, ``x is x`` but are not equal to\n themselves, ``x != x``. Additionally, comparing any value to a\n not-a-number value will return ``False``. 
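The programmer's note above about mutable class attributes can be seen in a minimal sketch (``Bag`` and ``SafeBag`` are illustrative names):

    class Bag:
        items = []                  # class attribute -- shared by every instance

        def add(self, thing):
            self.items.append(thing)

    a, b = Bag(), Bag()
    a.add('apple')
    print(b.items)                  # ['apple'] -- the shared list was mutated

    class SafeBag:
        def __init__(self):
            self.items = []         # instance attribute -- one list per instance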
For example, both ``3 <\n float(\'NaN\')`` and ``float(\'NaN\') < 3`` will return ``False``.\n\n* Bytes objects are compared lexicographically using the numeric\n values of their elements.\n\n* Strings are compared lexicographically using the numeric equivalents\n (the result of the built-in function ``ord()``) of their characters.\n [3] String and bytes object can\'t be compared!\n\n* Tuples and lists are compared lexicographically using comparison of\n corresponding elements. This means that to compare equal, each\n element must compare equal and the two sequences must be of the same\n type and have the same length.\n\n If not equal, the sequences are ordered the same as their first\n differing elements. For example, ``[1,2,x] <= [1,2,y]`` has the\n same value as ``x <= y``. If the corresponding element does not\n exist, the shorter sequence is ordered first (for example, ``[1,2] <\n [1,2,3]``).\n\n* Mappings (dictionaries) compare equal if and only if they have the\n same ``(key, value)`` pairs. Order comparisons ``(\'<\', \'<=\', \'>=\',\n \'>\')`` raise ``TypeError``.\n\n* Sets and frozensets define comparison operators to mean subset and\n superset tests. Those relations do not define total orderings (the\n two sets ``{1,2}`` and {2,3} are not equal, nor subsets of one\n another, nor supersets of one another). Accordingly, sets are not\n appropriate arguments for functions which depend on total ordering.\n For example, ``min()``, ``max()``, and ``sorted()`` produce\n undefined results given a list of sets as inputs.\n\n* Most other objects of built-in types compare unequal unless they are\n the same object; the choice whether one object is considered smaller\n or larger than another one is made arbitrarily but consistently\n within one execution of a program.\n\nComparison of objects of the differing types depends on whether either\nof the types provide explicit support for the comparison. Most\nnumeric types can be compared with one another. When cross-type\ncomparison is not supported, the comparison method returns\n``NotImplemented``.\n\nThe operators ``in`` and ``not in`` test for membership. ``x in s``\nevaluates to true if *x* is a member of *s*, and false otherwise. ``x\nnot in s`` returns the negation of ``x in s``. All built-in sequences\nand set types support this as well as dictionary, for which ``in``\ntests whether a the dictionary has a given key. For container types\nsuch as list, tuple, set, frozenset, dict, or collections.deque, the\nexpression ``x in y`` is equivalent to ``any(x is e or x == e for e in\ny)``.\n\nFor the string and bytes types, ``x in y`` is true if and only if *x*\nis a substring of *y*. An equivalent test is ``y.find(x) != -1``.\nEmpty strings are always considered to be a substring of any other\nstring, so ``"" in "abc"`` will return ``True``.\n\nFor user-defined classes which define the ``__contains__()`` method,\n``x in y`` is true if and only if ``y.__contains__(x)`` is true.\n\nFor user-defined classes which do not define ``__contains__()`` but do\ndefine ``__iter__()``, ``x in y`` is true if some value ``z`` with ``x\n== z`` is produced while iterating over ``y``. If an exception is\nraised during the iteration, it is as if ``in`` raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n``__getitem__()``, ``x in y`` is true if and only if there is a non-\nnegative integer index *i* such that ``x == y[i]``, and all lower\ninteger indices do not raise ``IndexError`` exception. 
(If any other\nexception is raised, it is as if ``in`` raised that exception).\n\nThe operator ``not in`` is defined to have the inverse true value of\n``in``.\n\nThe operators ``is`` and ``is not`` test for object identity: ``x is\ny`` is true if and only if *x* and *y* are the same object. ``x is\nnot y`` yields the inverse truth value. [4]\n', - 'compound': '\nCompound statements\n*******************\n\nCompound statements contain (groups of) other statements; they affect\nor control the execution of those other statements in some way. In\ngeneral, compound statements span multiple lines, although in simple\nincarnations a whole compound statement may be contained in one line.\n\nThe ``if``, ``while`` and ``for`` statements implement traditional\ncontrol flow constructs. ``try`` specifies exception handlers and/or\ncleanup code for a group of statements, while the ``with`` statement\nallows the execution of initialization and finalization code around a\nblock of code. Function and class definitions are also syntactically\ncompound statements.\n\nCompound statements consist of one or more \'clauses.\' A clause\nconsists of a header and a \'suite.\' The clause headers of a\nparticular compound statement are all at the same indentation level.\nEach clause header begins with a uniquely identifying keyword and ends\nwith a colon. A suite is a group of statements controlled by a\nclause. A suite can be one or more semicolon-separated simple\nstatements on the same line as the header, following the header\'s\ncolon, or it can be one or more indented statements on subsequent\nlines. Only the latter form of suite can contain nested compound\nstatements; the following is illegal, mostly because it wouldn\'t be\nclear to which ``if`` clause a following ``else`` clause would belong:\n\n if test1: if test2: print(x)\n\nAlso note that the semicolon binds tighter than the colon in this\ncontext, so that in the following example, either all or none of the\n``print()`` calls are executed:\n\n if x < y < z: print(x); print(y); print(z)\n\nSummarizing:\n\n compound_stmt ::= if_stmt\n | while_stmt\n | for_stmt\n | try_stmt\n | with_stmt\n | funcdef\n | classdef\n suite ::= stmt_list NEWLINE | NEWLINE INDENT statement+ DEDENT\n statement ::= stmt_list NEWLINE | compound_stmt\n stmt_list ::= simple_stmt (";" simple_stmt)* [";"]\n\nNote that statements always end in a ``NEWLINE`` possibly followed by\na ``DEDENT``. 
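Two of the comparison rules above in a minimal sketch (chained comparisons and the not-a-number behaviour):

    x, y, z = 1, 5, 10
    print(x < y <= z)               # True: same as (x < y) and (y <= z)

    nan = float('nan')
    print(nan == nan, nan != nan)   # False True -- NaN is never equal to itself
    print(3 < nan, nan < 3)         # False False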
Also note that optional continuation clauses always\nbegin with a keyword that cannot start a statement, thus there are no\nambiguities (the \'dangling ``else``\' problem is solved in Python by\nrequiring nested ``if`` statements to be indented).\n\nThe formatting of the grammar rules in the following sections places\neach clause on a separate line for clarity.\n\n\nThe ``if`` statement\n====================\n\nThe ``if`` statement is used for conditional execution:\n\n if_stmt ::= "if" expression ":" suite\n ( "elif" expression ":" suite )*\n ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the ``if`` statement is executed or evaluated).\nIf all expressions are false, the suite of the ``else`` clause, if\npresent, is executed.\n\n\nThe ``while`` statement\n=======================\n\nThe ``while`` statement is used for repeated execution as long as an\nexpression is true:\n\n while_stmt ::= "while" expression ":" suite\n ["else" ":" suite]\n\nThis repeatedly tests the expression and, if it is true, executes the\nfirst suite; if the expression is false (which may be the first time\nit is tested) the suite of the ``else`` clause, if present, is\nexecuted and the loop terminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ngoes back to testing the expression.\n\n\nThe ``for`` statement\n=====================\n\nThe ``for`` statement is used to iterate over the elements of a\nsequence (such as a string, tuple or list) or other iterable object:\n\n for_stmt ::= "for" target_list "in" expression_list ":" suite\n ["else" ":" suite]\n\nThe expression list is evaluated once; it should yield an iterable\nobject. An iterator is created for the result of the\n``expression_list``. The suite is then executed once for each item\nprovided by the iterator, in the order of ascending indices. Each\nitem in turn is assigned to the target list using the standard rules\nfor assignments (see *Assignment statements*), and then the suite is\nexecuted. When the items are exhausted (which is immediately when the\nsequence is empty or an iterator raises a ``StopIteration``\nexception), the suite in the ``else`` clause, if present, is executed,\nand the loop terminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ncontinues with the next item, or with the ``else`` clause if there was\nno next item.\n\nThe suite may assign to the variable(s) in the target list; this does\nnot affect the next item assigned to it.\n\nNames in the target list are not deleted when the loop is finished,\nbut if the sequence is empty, it will not have been assigned to at all\nby the loop. Hint: the built-in function ``range()`` returns an\niterator of integers suitable to emulate the effect of Pascal\'s ``for\ni := a to b do``; e.g., ``list(range(3))`` returns the list ``[0, 1,\n2]``.\n\nNote: There is a subtlety when the sequence is being modified by the loop\n (this can only occur for mutable sequences, i.e. lists). 
An\n internal counter is used to keep track of which item is used next,\n and this is incremented on each iteration. When this counter has\n reached the length of the sequence the loop terminates. This means\n that if the suite deletes the current (or a previous) item from the\n sequence, the next item will be skipped (since it gets the index of\n the current item which has already been treated). Likewise, if the\n suite inserts an item in the sequence before the current item, the\n current item will be treated again the next time through the loop.\n This can lead to nasty bugs that can be avoided by making a\n temporary copy using a slice of the whole sequence, e.g.,\n\n for x in a[:]:\n if x < 0: a.remove(x)\n\n\nThe ``try`` statement\n=====================\n\nThe ``try`` statement specifies exception handlers and/or cleanup code\nfor a group of statements:\n\n try_stmt ::= try1_stmt | try2_stmt\n try1_stmt ::= "try" ":" suite\n ("except" [expression ["as" target]] ":" suite)+\n ["else" ":" suite]\n ["finally" ":" suite]\n try2_stmt ::= "try" ":" suite\n "finally" ":" suite\n\nThe ``except`` clause(s) specify one or more exception handlers. When\nno exception occurs in the ``try`` clause, no exception handler is\nexecuted. When an exception occurs in the ``try`` suite, a search for\nan exception handler is started. This search inspects the except\nclauses in turn until one is found that matches the exception. An\nexpression-less except clause, if present, must be last; it matches\nany exception. For an except clause with an expression, that\nexpression is evaluated, and the clause matches the exception if the\nresulting object is "compatible" with the exception. An object is\ncompatible with an exception if it is the class or a base class of the\nexception object or a tuple containing an item compatible with the\nexception.\n\nIf no except clause matches the exception, the search for an exception\nhandler continues in the surrounding code and on the invocation stack.\n[1]\n\nIf the evaluation of an expression in the header of an except clause\nraises an exception, the original search for a handler is canceled and\na search starts for the new exception in the surrounding code and on\nthe call stack (it is treated as if the entire ``try`` statement\nraised the exception).\n\nWhen a matching except clause is found, the exception is assigned to\nthe target specified after the ``as`` keyword in that except clause,\nif present, and the except clause\'s suite is executed. All except\nclauses must have an executable block. When the end of this block is\nreached, execution continues normally after the entire try statement.\n(This means that if two nested handlers exist for the same exception,\nand the exception occurs in the try clause of the inner handler, the\nouter handler will not handle the exception.)\n\nWhen an exception has been assigned using ``as target``, it is cleared\nat the end of the except clause. This is as if\n\n except E as N:\n foo\n\nwas translated to\n\n except E as N:\n try:\n foo\n finally:\n del N\n\nThis means the exception must be assigned to a different name to be\nable to refer to it after the except clause. Exceptions are cleared\nbecause with the traceback attached to them, they form a reference\ncycle with the stack frame, keeping all locals in that frame alive\nuntil the next garbage collection occurs.\n\nBefore an except clause\'s suite is executed, details about the\nexception are stored in the ``sys`` module and can be access via\n``sys.exc_info()``. 
``sys.exc_info()`` returns a 3-tuple consisting of\nthe exception class, the exception instance and a traceback object\n(see section *The standard type hierarchy*) identifying the point in\nthe program where the exception occurred. ``sys.exc_info()`` values\nare restored to their previous values (before the call) when returning\nfrom a function that handled an exception.\n\nThe optional ``else`` clause is executed if and when control flows off\nthe end of the ``try`` clause. [2] Exceptions in the ``else`` clause\nare not handled by the preceding ``except`` clauses.\n\nIf ``finally`` is present, it specifies a \'cleanup\' handler. The\n``try`` clause is executed, including any ``except`` and ``else``\nclauses. If an exception occurs in any of the clauses and is not\nhandled, the exception is temporarily saved. The ``finally`` clause is\nexecuted. If there is a saved exception it is re-raised at the end of\nthe ``finally`` clause. If the ``finally`` clause raises another\nexception, the saved exception is set as the context of the new\nexception. If the ``finally`` clause executes a ``return`` or\n``break`` statement, the saved exception is discarded:\n\n def f():\n try:\n 1/0\n finally:\n return 42\n\n >>> f()\n 42\n\nThe exception information is not available to the program during\nexecution of the ``finally`` clause.\n\nWhen a ``return``, ``break`` or ``continue`` statement is executed in\nthe ``try`` suite of a ``try``...``finally`` statement, the\n``finally`` clause is also executed \'on the way out.\' A ``continue``\nstatement is illegal in the ``finally`` clause. (The reason is a\nproblem with the current implementation --- this restriction may be\nlifted in the future).\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information on using the ``raise`` statement to\ngenerate exceptions may be found in section *The raise statement*.\n\n\nThe ``with`` statement\n======================\n\nThe ``with`` statement is used to wrap the execution of a block with\nmethods defined by a context manager (see section *With Statement\nContext Managers*). This allows common\n``try``...``except``...``finally`` usage patterns to be encapsulated\nfor convenient reuse.\n\n with_stmt ::= "with" with_item ("," with_item)* ":" suite\n with_item ::= expression ["as" target]\n\nThe execution of the ``with`` statement with one "item" proceeds as\nfollows:\n\n1. The context expression (the expression given in the ``with_item``)\n is evaluated to obtain a context manager.\n\n2. The context manager\'s ``__exit__()`` is loaded for later use.\n\n3. The context manager\'s ``__enter__()`` method is invoked.\n\n4. If a target was included in the ``with`` statement, the return\n value from ``__enter__()`` is assigned to it.\n\n Note: The ``with`` statement guarantees that if the ``__enter__()``\n method returns without an error, then ``__exit__()`` will always\n be called. Thus, if an error occurs during the assignment to the\n target list, it will be treated the same as an error occurring\n within the suite would be. See step 6 below.\n\n5. The suite is executed.\n\n6. The context manager\'s ``__exit__()`` method is invoked. If an\n exception caused the suite to be exited, its type, value, and\n traceback are passed as arguments to ``__exit__()``. Otherwise,\n three ``None`` arguments are supplied.\n\n If the suite was exited due to an exception, and the return value\n from the ``__exit__()`` method was false, the exception is\n reraised. 
If the return value was true, the exception is\n suppressed, and execution continues with the statement following\n the ``with`` statement.\n\n If the suite was exited for any reason other than an exception, the\n return value from ``__exit__()`` is ignored, and execution proceeds\n at the normal location for the kind of exit that was taken.\n\nWith more than one item, the context managers are processed as if\nmultiple ``with`` statements were nested:\n\n with A() as a, B() as b:\n suite\n\nis equivalent to\n\n with A() as a:\n with B() as b:\n suite\n\nChanged in version 3.1: Support for multiple context expressions.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n\n\nFunction definitions\n====================\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n funcdef ::= [decorators] "def" funcname "(" [parameter_list] ")" ["->" expression] ":" suite\n decorators ::= decorator+\n decorator ::= "@" dotted_name ["(" [parameter_list [","]] ")"] NEWLINE\n dotted_name ::= identifier ("." identifier)*\n parameter_list ::= (defparameter ",")*\n ( "*" [parameter] ("," defparameter)* ["," "**" parameter]\n | "**" parameter\n | defparameter [","] )\n parameter ::= identifier [":" expression]\n defparameter ::= parameter ["=" expression]\n funcname ::= identifier\n\nA function definition is an executable statement. Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function). This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition. The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object. Multiple decorators are applied in\nnested fashion. For example, the following code\n\n @f1(arg)\n @f2\n def func(): pass\n\nis equivalent to\n\n def func(): pass\n func = f1(arg)(f2(func))\n\nWhen one or more *parameters* have the form *parameter* ``=``\n*expression*, the function is said to have "default parameter values."\nFor a parameter with a default value, the corresponding *argument* may\nbe omitted from a call, in which case the parameter\'s default value is\nsubstituted. If a parameter has a default value, all following\nparameters up until the "``*``" must also have a default value ---\nthis is a syntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated when the function definition\nis executed.** This means that the expression is evaluated once, when\nthe function is defined, and that the same "pre-computed" value is\nused for each call. This is especially important to understand when a\ndefault parameter is a mutable object, such as a list or a dictionary:\nif the function modifies the object (e.g. by appending an item to a\nlist), the default value is in effect modified. This is generally not\nwhat was intended. 
A way around this is to use ``None`` as the\ndefault, and explicitly test for it in the body of the function, e.g.:\n\n def whats_on_the_telly(penguin=None):\n if penguin is None:\n penguin = []\n penguin.append("property of the zoo")\n return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values. If the form\n"``*identifier``" is present, it is initialized to a tuple receiving\nany excess positional parameters, defaulting to the empty tuple. If\nthe form "``**identifier``" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary. Parameters after "``*``" or "``*identifier``" are\nkeyword-only parameters and may only be passed used keyword arguments.\n\nParameters may have annotations of the form "``: expression``"\nfollowing the parameter name. Any parameter may have an annotation\neven those of the form ``*identifier`` or ``**identifier``. Functions\nmay have "return" annotation of the form "``-> expression``" after the\nparameter list. These annotations can be any valid Python expression\nand are evaluated when the function definition is executed.\nAnnotations may be evaluated in a different order than they appear in\nthe source code. The presence of annotations does not change the\nsemantics of a function. The annotation values are available as\nvalues of a dictionary keyed by the parameters\' names in the\n``__annotations__`` attribute of the function object.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions. This uses lambda forms,\ndescribed in section *Lambdas*. Note that the lambda form is merely a\nshorthand for a simplified function definition; a function defined in\na "``def``" statement can be passed around or assigned to another name\njust like a function defined by a lambda form. The "``def``" form is\nactually more powerful since it allows the execution of multiple\nstatements and annotations.\n\n**Programmer\'s note:** Functions are first-class objects. A "``def``"\nform executed inside a function definition defines a local function\nthat can be returned or passed around. Free variables used in the\nnested function can access the local variables of the function\ncontaining the def. See section *Naming and binding* for details.\n\nSee also:\n\n **PEP 3107** - Function Annotations\n The original specification for function annotations.\n\n\nClass definitions\n=================\n\nA class definition defines a class object (see section *The standard\ntype hierarchy*):\n\n classdef ::= [decorators] "class" classname [inheritance] ":" suite\n inheritance ::= "(" [parameter_list] ")"\n classname ::= identifier\n\nA class definition is an executable statement. The inheritance list\nusually gives a list of base classes (see *Customizing class creation*\nfor more advanced uses), so each item in the list should evaluate to a\nclass object which allows subclassing. Classes without an inheritance\nlist inherit, by default, from the base class ``object``; hence,\n\n class Foo:\n pass\n\nis equivalent to\n\n class Foo(object):\n pass\n\nThe class\'s suite is then executed in a new execution frame (see\n*Naming and binding*), using a newly created local namespace and the\noriginal global namespace. 
(Usually, the suite contains mostly\nfunction definitions.) When the class\'s suite finishes execution, its\nexecution frame is discarded but its local namespace is saved. [4] A\nclass object is then created using the inheritance list for the base\nclasses and the saved local namespace for the attribute dictionary.\nThe class name is bound to this class object in the original local\nnamespace.\n\nClass creation can be customized heavily using *metaclasses*.\n\nClasses can also be decorated: just like when decorating functions,\n\n @f1(arg)\n @f2\n class Foo: pass\n\nis equivalent to\n\n class Foo: pass\n Foo = f1(arg)(f2(Foo))\n\nThe evaluation rules for the decorator expressions are the same as for\nfunction decorators. The result must be a class object, which is then\nbound to the class name.\n\n**Programmer\'s note:** Variables defined in the class definition are\nclass attributes; they are shared by instances. Instance attributes\ncan be set in a method with ``self.name = value``. Both class and\ninstance attributes are accessible through the notation\n"``self.name``", and an instance attribute hides a class attribute\nwith the same name when accessed in this way. Class attributes can be\nused as defaults for instance attributes, but using mutable values\nthere can lead to unexpected results. *Descriptors* can be used to\ncreate instance variables with different implementation details.\n\nSee also:\n\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\n Decorators\n\n-[ Footnotes ]-\n\n[1] The exception is propagated to the invocation stack unless there\n is a ``finally`` clause which happens to raise another exception.\n That new exception causes the old one to be lost.\n\n[2] Currently, control "flows off the end" except in the case of an\n exception or the execution of a ``return``, ``continue``, or\n ``break`` statement.\n\n[3] A string literal appearing as the first statement in the function\n body is transformed into the function\'s ``__doc__`` attribute and\n therefore the function\'s *docstring*.\n\n[4] A string literal appearing as the first statement in the class\n body is transformed into the namespace\'s ``__doc__`` item and\n therefore the class\'s *docstring*.\n', + 'compound': '\nCompound statements\n*******************\n\nCompound statements contain (groups of) other statements; they affect\nor control the execution of those other statements in some way. In\ngeneral, compound statements span multiple lines, although in simple\nincarnations a whole compound statement may be contained in one line.\n\nThe ``if``, ``while`` and ``for`` statements implement traditional\ncontrol flow constructs. ``try`` specifies exception handlers and/or\ncleanup code for a group of statements, while the ``with`` statement\nallows the execution of initialization and finalization code around a\nblock of code. Function and class definitions are also syntactically\ncompound statements.\n\nCompound statements consist of one or more \'clauses.\' A clause\nconsists of a header and a \'suite.\' The clause headers of a\nparticular compound statement are all at the same indentation level.\nEach clause header begins with a uniquely identifying keyword and ends\nwith a colon. A suite is a group of statements controlled by a\nclause. A suite can be one or more semicolon-separated simple\nstatements on the same line as the header, following the header\'s\ncolon, or it can be one or more indented statements on subsequent\nlines. 
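The ``else`` clause on loops described above (executed only when the loop is not ended by ``break``) in a minimal sketch; ``find`` and its arguments are illustrative only:

    def find(needle, haystack):
        for item in haystack:
            if item == needle:
                print('found', item)
                break
        else:
            # runs only when the loop finished without hitting ``break``
            print(needle, 'not found')

    find(3, [1, 2, 3])    # found 3
    find(9, [1, 2, 3])    # 9 not found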
Only the latter form of suite can contain nested compound\nstatements; the following is illegal, mostly because it wouldn\'t be\nclear to which ``if`` clause a following ``else`` clause would belong:\n\n if test1: if test2: print(x)\n\nAlso note that the semicolon binds tighter than the colon in this\ncontext, so that in the following example, either all or none of the\n``print()`` calls are executed:\n\n if x < y < z: print(x); print(y); print(z)\n\nSummarizing:\n\n compound_stmt ::= if_stmt\n | while_stmt\n | for_stmt\n | try_stmt\n | with_stmt\n | funcdef\n | classdef\n suite ::= stmt_list NEWLINE | NEWLINE INDENT statement+ DEDENT\n statement ::= stmt_list NEWLINE | compound_stmt\n stmt_list ::= simple_stmt (";" simple_stmt)* [";"]\n\nNote that statements always end in a ``NEWLINE`` possibly followed by\na ``DEDENT``. Also note that optional continuation clauses always\nbegin with a keyword that cannot start a statement, thus there are no\nambiguities (the \'dangling ``else``\' problem is solved in Python by\nrequiring nested ``if`` statements to be indented).\n\nThe formatting of the grammar rules in the following sections places\neach clause on a separate line for clarity.\n\n\nThe ``if`` statement\n====================\n\nThe ``if`` statement is used for conditional execution:\n\n if_stmt ::= "if" expression ":" suite\n ( "elif" expression ":" suite )*\n ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the ``if`` statement is executed or evaluated).\nIf all expressions are false, the suite of the ``else`` clause, if\npresent, is executed.\n\n\nThe ``while`` statement\n=======================\n\nThe ``while`` statement is used for repeated execution as long as an\nexpression is true:\n\n while_stmt ::= "while" expression ":" suite\n ["else" ":" suite]\n\nThis repeatedly tests the expression and, if it is true, executes the\nfirst suite; if the expression is false (which may be the first time\nit is tested) the suite of the ``else`` clause, if present, is\nexecuted and the loop terminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ngoes back to testing the expression.\n\n\nThe ``for`` statement\n=====================\n\nThe ``for`` statement is used to iterate over the elements of a\nsequence (such as a string, tuple or list) or other iterable object:\n\n for_stmt ::= "for" target_list "in" expression_list ":" suite\n ["else" ":" suite]\n\nThe expression list is evaluated once; it should yield an iterable\nobject. An iterator is created for the result of the\n``expression_list``. The suite is then executed once for each item\nprovided by the iterator, in the order of ascending indices. Each\nitem in turn is assigned to the target list using the standard rules\nfor assignments (see *Assignment statements*), and then the suite is\nexecuted. When the items are exhausted (which is immediately when the\nsequence is empty or an iterator raises a ``StopIteration``\nexception), the suite in the ``else`` clause, if present, is executed,\nand the loop terminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. 
A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ncontinues with the next item, or with the ``else`` clause if there was\nno next item.\n\nThe suite may assign to the variable(s) in the target list; this does\nnot affect the next item assigned to it.\n\nNames in the target list are not deleted when the loop is finished,\nbut if the sequence is empty, it will not have been assigned to at all\nby the loop. Hint: the built-in function ``range()`` returns an\niterator of integers suitable to emulate the effect of Pascal\'s ``for\ni := a to b do``; e.g., ``list(range(3))`` returns the list ``[0, 1,\n2]``.\n\nNote: There is a subtlety when the sequence is being modified by the loop\n (this can only occur for mutable sequences, i.e. lists). An\n internal counter is used to keep track of which item is used next,\n and this is incremented on each iteration. When this counter has\n reached the length of the sequence the loop terminates. This means\n that if the suite deletes the current (or a previous) item from the\n sequence, the next item will be skipped (since it gets the index of\n the current item which has already been treated). Likewise, if the\n suite inserts an item in the sequence before the current item, the\n current item will be treated again the next time through the loop.\n This can lead to nasty bugs that can be avoided by making a\n temporary copy using a slice of the whole sequence, e.g.,\n\n for x in a[:]:\n if x < 0: a.remove(x)\n\n\nThe ``try`` statement\n=====================\n\nThe ``try`` statement specifies exception handlers and/or cleanup code\nfor a group of statements:\n\n try_stmt ::= try1_stmt | try2_stmt\n try1_stmt ::= "try" ":" suite\n ("except" [expression ["as" target]] ":" suite)+\n ["else" ":" suite]\n ["finally" ":" suite]\n try2_stmt ::= "try" ":" suite\n "finally" ":" suite\n\nThe ``except`` clause(s) specify one or more exception handlers. When\nno exception occurs in the ``try`` clause, no exception handler is\nexecuted. When an exception occurs in the ``try`` suite, a search for\nan exception handler is started. This search inspects the except\nclauses in turn until one is found that matches the exception. An\nexpression-less except clause, if present, must be last; it matches\nany exception. For an except clause with an expression, that\nexpression is evaluated, and the clause matches the exception if the\nresulting object is "compatible" with the exception. An object is\ncompatible with an exception if it is the class or a base class of the\nexception object or a tuple containing an item compatible with the\nexception.\n\nIf no except clause matches the exception, the search for an exception\nhandler continues in the surrounding code and on the invocation stack.\n[1]\n\nIf the evaluation of an expression in the header of an except clause\nraises an exception, the original search for a handler is canceled and\na search starts for the new exception in the surrounding code and on\nthe call stack (it is treated as if the entire ``try`` statement\nraised the exception).\n\nWhen a matching except clause is found, the exception is assigned to\nthe target specified after the ``as`` keyword in that except clause,\nif present, and the except clause\'s suite is executed. All except\nclauses must have an executable block. 
When the end of this block is\nreached, execution continues normally after the entire try statement.\n(This means that if two nested handlers exist for the same exception,\nand the exception occurs in the try clause of the inner handler, the\nouter handler will not handle the exception.)\n\nWhen an exception has been assigned using ``as target``, it is cleared\nat the end of the except clause. This is as if\n\n except E as N:\n foo\n\nwas translated to\n\n except E as N:\n try:\n foo\n finally:\n del N\n\nThis means the exception must be assigned to a different name to be\nable to refer to it after the except clause. Exceptions are cleared\nbecause with the traceback attached to them, they form a reference\ncycle with the stack frame, keeping all locals in that frame alive\nuntil the next garbage collection occurs.\n\nBefore an except clause\'s suite is executed, details about the\nexception are stored in the ``sys`` module and can be access via\n``sys.exc_info()``. ``sys.exc_info()`` returns a 3-tuple consisting of\nthe exception class, the exception instance and a traceback object\n(see section *The standard type hierarchy*) identifying the point in\nthe program where the exception occurred. ``sys.exc_info()`` values\nare restored to their previous values (before the call) when returning\nfrom a function that handled an exception.\n\nThe optional ``else`` clause is executed if and when control flows off\nthe end of the ``try`` clause. [2] Exceptions in the ``else`` clause\nare not handled by the preceding ``except`` clauses.\n\nIf ``finally`` is present, it specifies a \'cleanup\' handler. The\n``try`` clause is executed, including any ``except`` and ``else``\nclauses. If an exception occurs in any of the clauses and is not\nhandled, the exception is temporarily saved. The ``finally`` clause is\nexecuted. If there is a saved exception it is re-raised at the end of\nthe ``finally`` clause. If the ``finally`` clause raises another\nexception, the saved exception is set as the context of the new\nexception. If the ``finally`` clause executes a ``return`` or\n``break`` statement, the saved exception is discarded:\n\n def f():\n try:\n 1/0\n finally:\n return 42\n\n >>> f()\n 42\n\nThe exception information is not available to the program during\nexecution of the ``finally`` clause.\n\nWhen a ``return``, ``break`` or ``continue`` statement is executed in\nthe ``try`` suite of a ``try``...``finally`` statement, the\n``finally`` clause is also executed \'on the way out.\' A ``continue``\nstatement is illegal in the ``finally`` clause. (The reason is a\nproblem with the current implementation --- this restriction may be\nlifted in the future).\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information on using the ``raise`` statement to\ngenerate exceptions may be found in section *The raise statement*.\n\n\nThe ``with`` statement\n======================\n\nThe ``with`` statement is used to wrap the execution of a block with\nmethods defined by a context manager (see section *With Statement\nContext Managers*). This allows common\n``try``...``except``...``finally`` usage patterns to be encapsulated\nfor convenient reuse.\n\n with_stmt ::= "with" with_item ("," with_item)* ":" suite\n with_item ::= expression ["as" target]\n\nThe execution of the ``with`` statement with one "item" proceeds as\nfollows:\n\n1. The context expression (the expression given in the ``with_item``)\n is evaluated to obtain a context manager.\n\n2. 
The context manager\'s ``__exit__()`` is loaded for later use.\n\n3. The context manager\'s ``__enter__()`` method is invoked.\n\n4. If a target was included in the ``with`` statement, the return\n value from ``__enter__()`` is assigned to it.\n\n Note: The ``with`` statement guarantees that if the ``__enter__()``\n method returns without an error, then ``__exit__()`` will always\n be called. Thus, if an error occurs during the assignment to the\n target list, it will be treated the same as an error occurring\n within the suite would be. See step 6 below.\n\n5. The suite is executed.\n\n6. The context manager\'s ``__exit__()`` method is invoked. If an\n exception caused the suite to be exited, its type, value, and\n traceback are passed as arguments to ``__exit__()``. Otherwise,\n three ``None`` arguments are supplied.\n\n If the suite was exited due to an exception, and the return value\n from the ``__exit__()`` method was false, the exception is\n reraised. If the return value was true, the exception is\n suppressed, and execution continues with the statement following\n the ``with`` statement.\n\n If the suite was exited for any reason other than an exception, the\n return value from ``__exit__()`` is ignored, and execution proceeds\n at the normal location for the kind of exit that was taken.\n\nWith more than one item, the context managers are processed as if\nmultiple ``with`` statements were nested:\n\n with A() as a, B() as b:\n suite\n\nis equivalent to\n\n with A() as a:\n with B() as b:\n suite\n\nChanged in version 3.1: Support for multiple context expressions.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n\n\nFunction definitions\n====================\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n funcdef ::= [decorators] "def" funcname "(" [parameter_list] ")" ["->" expression] ":" suite\n decorators ::= decorator+\n decorator ::= "@" dotted_name ["(" [parameter_list [","]] ")"] NEWLINE\n dotted_name ::= identifier ("." identifier)*\n parameter_list ::= (defparameter ",")*\n ( "*" [parameter] ("," defparameter)* ["," "**" parameter]\n | "**" parameter\n | defparameter [","] )\n parameter ::= identifier [":" expression]\n defparameter ::= parameter ["=" expression]\n funcname ::= identifier\n\nA function definition is an executable statement. Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function). This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition. The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object. Multiple decorators are applied in\nnested fashion. 
For example, the following code\n\n @f1(arg)\n @f2\n def func(): pass\n\nis equivalent to\n\n def func(): pass\n func = f1(arg)(f2(func))\n\nWhen one or more *parameters* have the form *parameter* ``=``\n*expression*, the function is said to have "default parameter values."\nFor a parameter with a default value, the corresponding *argument* may\nbe omitted from a call, in which case the parameter\'s default value is\nsubstituted. If a parameter has a default value, all following\nparameters up until the "``*``" must also have a default value ---\nthis is a syntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated from left to right when the\nfunction definition is executed.** This means that the expression is\nevaluated once, when the function is defined, and that the same "pre-\ncomputed" value is used for each call. This is especially important\nto understand when a default parameter is a mutable object, such as a\nlist or a dictionary: if the function modifies the object (e.g. by\nappending an item to a list), the default value is in effect modified.\nThis is generally not what was intended. A way around this is to use\n``None`` as the default, and explicitly test for it in the body of the\nfunction, e.g.:\n\n def whats_on_the_telly(penguin=None):\n if penguin is None:\n penguin = []\n penguin.append("property of the zoo")\n return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values. If the form\n"``*identifier``" is present, it is initialized to a tuple receiving\nany excess positional parameters, defaulting to the empty tuple. If\nthe form "``**identifier``" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary. Parameters after "``*``" or "``*identifier``" are\nkeyword-only parameters and may only be passed used keyword arguments.\n\nParameters may have annotations of the form "``: expression``"\nfollowing the parameter name. Any parameter may have an annotation\neven those of the form ``*identifier`` or ``**identifier``. Functions\nmay have "return" annotation of the form "``-> expression``" after the\nparameter list. These annotations can be any valid Python expression\nand are evaluated when the function definition is executed.\nAnnotations may be evaluated in a different order than they appear in\nthe source code. The presence of annotations does not change the\nsemantics of a function. The annotation values are available as\nvalues of a dictionary keyed by the parameters\' names in the\n``__annotations__`` attribute of the function object.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions. This uses lambda forms,\ndescribed in section *Lambdas*. Note that the lambda form is merely a\nshorthand for a simplified function definition; a function defined in\na "``def``" statement can be passed around or assigned to another name\njust like a function defined by a lambda form. The "``def``" form is\nactually more powerful since it allows the execution of multiple\nstatements and annotations.\n\n**Programmer\'s note:** Functions are first-class objects. A "``def``"\nform executed inside a function definition defines a local function\nthat can be returned or passed around. 
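A small sketch of such a nested ``def`` (the ``make_adder`` name is invented for this illustration):

   def make_adder(n):
       def add(x):
           return x + n      # n is a free variable of the nested function
       return add            # the local function is returned like any object

   add3 = make_adder(3)
   print(add3(10))           # 13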
Free variables used in the\nnested function can access the local variables of the function\ncontaining the def. See section *Naming and binding* for details.\n\nSee also:\n\n **PEP 3107** - Function Annotations\n The original specification for function annotations.\n\n\nClass definitions\n=================\n\nA class definition defines a class object (see section *The standard\ntype hierarchy*):\n\n classdef ::= [decorators] "class" classname [inheritance] ":" suite\n inheritance ::= "(" [parameter_list] ")"\n classname ::= identifier\n\nA class definition is an executable statement. The inheritance list\nusually gives a list of base classes (see *Customizing class creation*\nfor more advanced uses), so each item in the list should evaluate to a\nclass object which allows subclassing. Classes without an inheritance\nlist inherit, by default, from the base class ``object``; hence,\n\n class Foo:\n pass\n\nis equivalent to\n\n class Foo(object):\n pass\n\nThe class\'s suite is then executed in a new execution frame (see\n*Naming and binding*), using a newly created local namespace and the\noriginal global namespace. (Usually, the suite contains mostly\nfunction definitions.) When the class\'s suite finishes execution, its\nexecution frame is discarded but its local namespace is saved. [4] A\nclass object is then created using the inheritance list for the base\nclasses and the saved local namespace for the attribute dictionary.\nThe class name is bound to this class object in the original local\nnamespace.\n\nClass creation can be customized heavily using *metaclasses*.\n\nClasses can also be decorated: just like when decorating functions,\n\n @f1(arg)\n @f2\n class Foo: pass\n\nis equivalent to\n\n class Foo: pass\n Foo = f1(arg)(f2(Foo))\n\nThe evaluation rules for the decorator expressions are the same as for\nfunction decorators. The result must be a class object, which is then\nbound to the class name.\n\n**Programmer\'s note:** Variables defined in the class definition are\nclass attributes; they are shared by instances. Instance attributes\ncan be set in a method with ``self.name = value``. Both class and\ninstance attributes are accessible through the notation\n"``self.name``", and an instance attribute hides a class attribute\nwith the same name when accessed in this way. Class attributes can be\nused as defaults for instance attributes, but using mutable values\nthere can lead to unexpected results. 
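A short sketch of that pitfall (``Bag`` is an invented name): the list is a class attribute, so every instance sees the same object.

   class Bag:
       items = []                 # class attribute, shared by all instances
       def add(self, x):
           self.items.append(x)   # mutates the shared list

   a = Bag()
   b = Bag()
   a.add('spam')
   print(b.items)                 # ['spam'] -- b sees a's addition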
*Descriptors* can be used to\ncreate instance variables with different implementation details.\n\nSee also:\n\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\n Decorators\n\n-[ Footnotes ]-\n\n[1] The exception is propagated to the invocation stack unless there\n is a ``finally`` clause which happens to raise another exception.\n That new exception causes the old one to be lost.\n\n[2] Currently, control "flows off the end" except in the case of an\n exception or the execution of a ``return``, ``continue``, or\n ``break`` statement.\n\n[3] A string literal appearing as the first statement in the function\n body is transformed into the function\'s ``__doc__`` attribute and\n therefore the function\'s *docstring*.\n\n[4] A string literal appearing as the first statement in the class\n body is transformed into the namespace\'s ``__doc__`` item and\n therefore the class\'s *docstring*.\n', 'context-managers': '\nWith Statement Context Managers\n*******************************\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a ``with`` statement. The context\nmanager handles the entry into, and the exit from, the desired runtime\ncontext for the execution of the block of code. Context managers are\nnormally invoked using the ``with`` statement (described in section\n*The with statement*), but can also be used by directly invoking their\nmethods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The ``with``\n statement will bind this method\'s return value to the target(s)\n specified in the ``as`` clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be ``None``.\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that ``__exit__()`` methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n', 'continue': '\nThe ``continue`` statement\n**************************\n\n continue_stmt ::= "continue"\n\n``continue`` may only occur syntactically nested in a ``for`` or\n``while`` loop, but not nested in a function or class definition or\n``finally`` clause within that loop. 
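For example, a brief sketch of ``continue`` inside a loop (purely illustrative):

   for n in range(6):
       if n % 2:
           continue     # skip the rest of the body for odd n
       print(n)         # prints 0, 2 and 4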
It continues with the next cycle\nof the nearest enclosing loop.\n\nWhen ``continue`` passes control out of a ``try`` statement with a\n``finally`` clause, that ``finally`` clause is executed before really\nstarting the next loop cycle.\n', 'conversions': '\nArithmetic conversions\n**********************\n\nWhen a description of an arithmetic operator below uses the phrase\n"the numeric arguments are converted to a common type," this means\nthat the operator implementation for built-in types works that way:\n\n* If either argument is a complex number, the other is converted to\n complex;\n\n* otherwise, if either argument is a floating point number, the other\n is converted to floating point;\n\n* otherwise, both must be integers and no conversion is necessary.\n\nSome additional rules apply for certain operators (e.g., a string left\nargument to the \'%\' operator). Extensions must define their own\nconversion behavior.\n', - 'customization': '\nBasic customization\n*******************\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. 
It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.last_traceback``. Circular references which are garbage are\n detected when the option cycle detector is enabled (it\'s on by\n default), but can only be cleaned up if there are no Python-\n level ``__del__()`` methods involved. Refer to the documentation\n for the ``gc`` module for more information about how\n ``__del__()`` methods are handled by the cycle detector,\n particularly the description of the ``garbage`` value.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function to compute the\n "official" string representation of an object. If at all possible,\n this should look like a valid Python expression that could be used\n to recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n ``<...some useful description...>`` should be returned. The return\n value must be a string object. If a class defines ``__repr__()``\n but not ``__str__()``, then ``__repr__()`` is also used when an\n "informal" string representation of instances of that class is\n required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by ``str(object)`` and the built-in functions ``format()``\n and ``print()`` to compute the "informal" or nicely printable\n string representation of an object. 
The return value must be a\n *string* object.\n\n This method differs from ``object.__repr__()`` in that there is no\n expectation that ``__str__()`` return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type ``object``\n calls ``object.__repr__()``.\n\nobject.__bytes__(self)\n\n Called by ``bytes()`` to compute a byte-string representation of an\n object. This should return a ``bytes`` object.\n\nobject.__format__(self, format_spec)\n\n Called by the ``format()`` built-in function (and by extension, the\n ``str.format()`` method of class ``str``) to produce a "formatted"\n string representation of an object. The ``format_spec`` argument is\n a string that contains a description of the formatting options\n desired. The interpretation of the ``format_spec`` argument is up\n to the type implementing ``__format__()``, however most classes\n will either delegate formatting to one of the built-in types, or\n use a similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: ``xy`` calls ``x.__gt__(y)``, and ``x>=y`` calls\n ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. 
using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n If a class does not define an ``__eq__()`` method it should not\n define a ``__hash__()`` operation either; if it defines\n ``__eq__()`` but not ``__hash__()``, its instances will not be\n usable as items in hashable collections. If a class defines\n mutable objects and implements an ``__eq__()`` method, it should\n not implement ``__hash__()``, since the implementation of hashable\n collections requires that a key\'s hash value is immutable (if the\n object\'s hash value changes, it will be in the wrong hash bucket).\n\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns an appropriate value such\n that ``x == y`` implies both that ``x is y`` and ``hash(x) ==\n hash(y)``.\n\n A class that overrides ``__eq__()`` and does not define\n ``__hash__()`` will have its ``__hash__()`` implicitly set to\n ``None``. When the ``__hash__()`` method of a class is ``None``,\n instances of the class will raise an appropriate ``TypeError`` when\n a program attempts to retrieve their hash value, and will also be\n correctly identified as unhashable when checking ``isinstance(obj,\n collections.Hashable``).\n\n If a class that overrides ``__eq__()`` needs to retain the\n implementation of ``__hash__()`` from a parent class, the\n interpreter must be told this explicitly by setting ``__hash__ =\n .__hash__``.\n\n If a class that does not override ``__eq__()`` wishes to suppress\n hash support, it should include ``__hash__ = None`` in the class\n definition. A class which defines its own ``__hash__()`` that\n explicitly raises a ``TypeError`` would be incorrectly identified\n as hashable by an ``isinstance(obj, collections.Hashable)`` call.\n\n Note: By default, the ``__hash__()`` values of str, bytes and datetime\n objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also ``PYTHONHASHSEED``.\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``. When this method\n is not defined, ``__len__()`` is called, if it is defined, and the\n object is considered true if its result is nonzero. If a class\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\n are considered true.\n', + 'customization': '\nBasic customization\n*******************\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). 
The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.last_traceback``. Circular references which are garbage are\n detected and cleaned up when the cyclic garbage collector is\n enabled (it\'s on by default). 
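A small sketch of such a reference cycle (``Node`` is an invented class; the exact count returned by ``gc.collect()`` may vary):

   import gc

   class Node:
       pass

   a = Node()
   b = Node()
   a.partner = b        # a and b now refer to each other,
   b.partner = a        # forming a reference cycle
   del a, b             # the names are gone, but the cycle keeps both alive
   print(gc.collect() > 0)   # True: the cyclic collector finds the garbage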
Refer to the documentation for the\n ``gc`` module for more information about this topic.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function to compute the\n "official" string representation of an object. If at all possible,\n this should look like a valid Python expression that could be used\n to recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n ``<...some useful description...>`` should be returned. The return\n value must be a string object. If a class defines ``__repr__()``\n but not ``__str__()``, then ``__repr__()`` is also used when an\n "informal" string representation of instances of that class is\n required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by ``str(object)`` and the built-in functions ``format()``\n and ``print()`` to compute the "informal" or nicely printable\n string representation of an object. The return value must be a\n *string* object.\n\n This method differs from ``object.__repr__()`` in that there is no\n expectation that ``__str__()`` return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type ``object``\n calls ``object.__repr__()``.\n\nobject.__bytes__(self)\n\n Called by ``bytes()`` to compute a byte-string representation of an\n object. This should return a ``bytes`` object.\n\nobject.__format__(self, format_spec)\n\n Called by the ``format()`` built-in function (and by extension, the\n ``str.format()`` method of class ``str``) to produce a "formatted"\n string representation of an object. The ``format_spec`` argument is\n a string that contains a description of the formatting options\n desired. The interpretation of the ``format_spec`` argument is up\n to the type implementing ``__format__()``, however most classes\n will either delegate formatting to one of the built-in types, or\n use a similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. 
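Before the comparison methods are spelled out, a brief sketch of the ``__repr__()``/``__str__()`` distinction described above (``Ratio`` is an invented name):

   class Ratio:
       def __init__(self, num, den):
           self.num, self.den = num, den
       def __repr__(self):
           # unambiguous; ideally a valid expression recreating the object
           return 'Ratio(%r, %r)' % (self.num, self.den)
       def __str__(self):
           # friendlier form used by print() and str()
           return '%d/%d' % (self.num, self.den)

   r = Ratio(1, 3)
   print(r, repr(r))    # 1/3 Ratio(1, 3)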
The\n correspondence between operator symbols and method names is as\n follows: ``xy`` calls ``x.__gt__(y)``, and ``x>=y`` calls\n ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n Note: ``hash()`` truncates the value returned from an object\'s custom\n ``__hash__()`` method to the size of a ``Py_ssize_t``. This is\n typically 8 bytes on 64-bit builds and 4 bytes on 32-bit builds.\n If an object\'s ``__hash__()`` must interoperate on builds of\n different bit sizes, be sure to check the width on all supported\n builds. An easy way to do this is with ``python -c "import sys;\n print(sys.hash_info.width)"``\n\n If a class does not define an ``__eq__()`` method it should not\n define a ``__hash__()`` operation either; if it defines\n ``__eq__()`` but not ``__hash__()``, its instances will not be\n usable as items in hashable collections. If a class defines\n mutable objects and implements an ``__eq__()`` method, it should\n not implement ``__hash__()``, since the implementation of hashable\n collections requires that a key\'s hash value is immutable (if the\n object\'s hash value changes, it will be in the wrong hash bucket).\n\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns an appropriate value such\n that ``x == y`` implies both that ``x is y`` and ``hash(x) ==\n hash(y)``.\n\n A class that overrides ``__eq__()`` and does not define\n ``__hash__()`` will have its ``__hash__()`` implicitly set to\n ``None``. 
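A sketch of the ``__eq__()``/``__hash__()`` pairing described above (``Point`` is an invented name); without the ``__hash__()`` below, the class would fall under the implicit ``__hash__ = None`` rule just mentioned.

   class Point:
       def __init__(self, x, y):
           self.x, self.y = x, y
       def __eq__(self, other):
           return (isinstance(other, Point) and
                   (self.x, self.y) == (other.x, other.y))
       def __hash__(self):
           # mix the components that also take part in __eq__
           return hash((self.x, self.y))

   print(Point(1, 2) == Point(1, 2))        # True
   print(len({Point(1, 2), Point(1, 2)}))   # 1 -- usable in a set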
When the ``__hash__()`` method of a class is ``None``,\n instances of the class will raise an appropriate ``TypeError`` when\n a program attempts to retrieve their hash value, and will also be\n correctly identified as unhashable when checking ``isinstance(obj,\n collections.Hashable``).\n\n If a class that overrides ``__eq__()`` needs to retain the\n implementation of ``__hash__()`` from a parent class, the\n interpreter must be told this explicitly by setting ``__hash__ =\n .__hash__``.\n\n If a class that does not override ``__eq__()`` wishes to suppress\n hash support, it should include ``__hash__ = None`` in the class\n definition. A class which defines its own ``__hash__()`` that\n explicitly raises a ``TypeError`` would be incorrectly identified\n as hashable by an ``isinstance(obj, collections.Hashable)`` call.\n\n Note: By default, the ``__hash__()`` values of str, bytes and datetime\n objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also ``PYTHONHASHSEED``.\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``. When this method\n is not defined, ``__len__()`` is called, if it is defined, and the\n object is considered true if its result is nonzero. If a class\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\n are considered true.\n', 'debugger': '\n``pdb`` --- The Python Debugger\n*******************************\n\nThe module ``pdb`` defines an interactive source code debugger for\nPython programs. It supports setting (conditional) breakpoints and\nsingle stepping at the source line level, inspection of stack frames,\nsource code listing, and evaluation of arbitrary Python code in the\ncontext of any stack frame. It also supports post-mortem debugging\nand can be called under program control.\n\nThe debugger is extensible -- it is actually defined as the class\n``Pdb``. This is currently undocumented but easily understood by\nreading the source. The extension interface uses the modules ``bdb``\nand ``cmd``.\n\nThe debugger\'s prompt is ``(Pdb)``. Typical usage to run a program\nunder control of the debugger is:\n\n >>> import pdb\n >>> import mymodule\n >>> pdb.run(\'mymodule.test()\')\n > (0)?()\n (Pdb) continue\n > (1)?()\n (Pdb) continue\n NameError: \'spam\'\n > (1)?()\n (Pdb)\n\nChanged in version 3.3: Tab-completion via the ``readline`` module is\navailable for commands and command arguments, e.g. the current global\nand local names are offered as arguments of the ``print`` command.\n\n``pdb.py`` can also be invoked as a script to debug other scripts.\nFor example:\n\n python3 -m pdb myscript.py\n\nWhen invoked as a script, pdb will automatically enter post-mortem\ndebugging if the program being debugged exits abnormally. 
After post-\nmortem debugging (or after normal exit of the program), pdb will\nrestart the program. Automatic restarting preserves pdb\'s state (such\nas breakpoints) and in most cases is more useful than quitting the\ndebugger upon program\'s exit.\n\nNew in version 3.2: ``pdb.py`` now accepts a ``-c`` option that\nexecutes commands as if given in a ``.pdbrc`` file, see *Debugger\nCommands*.\n\nThe typical usage to break into the debugger from a running program is\nto insert\n\n import pdb; pdb.set_trace()\n\nat the location you want to break into the debugger. You can then\nstep through the code following this statement, and continue running\nwithout the debugger using the ``continue`` command.\n\nThe typical usage to inspect a crashed program is:\n\n >>> import pdb\n >>> import mymodule\n >>> mymodule.test()\n Traceback (most recent call last):\n File "", line 1, in ?\n File "./mymodule.py", line 4, in test\n test2()\n File "./mymodule.py", line 3, in test2\n print(spam)\n NameError: spam\n >>> pdb.pm()\n > ./mymodule.py(3)test2()\n -> print(spam)\n (Pdb)\n\nThe module defines the following functions; each enters the debugger\nin a slightly different way:\n\npdb.run(statement, globals=None, locals=None)\n\n Execute the *statement* (given as a string or a code object) under\n debugger control. The debugger prompt appears before any code is\n executed; you can set breakpoints and type ``continue``, or you can\n step through the statement using ``step`` or ``next`` (all these\n commands are explained below). The optional *globals* and *locals*\n arguments specify the environment in which the code is executed; by\n default the dictionary of the module ``__main__`` is used. (See\n the explanation of the built-in ``exec()`` or ``eval()``\n functions.)\n\npdb.runeval(expression, globals=None, locals=None)\n\n Evaluate the *expression* (given as a string or a code object)\n under debugger control. When ``runeval()`` returns, it returns the\n value of the expression. Otherwise this function is similar to\n ``run()``.\n\npdb.runcall(function, *args, **kwds)\n\n Call the *function* (a function or method object, not a string)\n with the given arguments. When ``runcall()`` returns, it returns\n whatever the function call returned. The debugger prompt appears\n as soon as the function is entered.\n\npdb.set_trace()\n\n Enter the debugger at the calling stack frame. This is useful to\n hard-code a breakpoint at a given point in a program, even if the\n code is not otherwise being debugged (e.g. when an assertion\n fails).\n\npdb.post_mortem(traceback=None)\n\n Enter post-mortem debugging of the given *traceback* object. If no\n *traceback* is given, it uses the one of the exception that is\n currently being handled (an exception must be being handled if the\n default is to be used).\n\npdb.pm()\n\n Enter post-mortem debugging of the traceback found in\n ``sys.last_traceback``.\n\nThe ``run*`` functions and ``set_trace()`` are aliases for\ninstantiating the ``Pdb`` class and calling the method of the same\nname. If you want to access further features, you have to do this\nyourself:\n\nclass class pdb.Pdb(completekey=\'tab\', stdin=None, stdout=None, skip=None, nosigint=False)\n\n ``Pdb`` is the debugger class.\n\n The *completekey*, *stdin* and *stdout* arguments are passed to the\n underlying ``cmd.Cmd`` class; see the description there.\n\n The *skip* argument, if given, must be an iterable of glob-style\n module name patterns. 
The debugger will not step into frames that\n originate in a module that matches one of these patterns. [1]\n\n By default, Pdb sets a handler for the SIGINT signal (which is sent\n when the user presses Ctrl-C on the console) when you give a\n ``continue`` command. This allows you to break into the debugger\n again by pressing Ctrl-C. If you want Pdb not to touch the SIGINT\n handler, set *nosigint* tot true.\n\n Example call to enable tracing with *skip*:\n\n import pdb; pdb.Pdb(skip=[\'django.*\']).set_trace()\n\n New in version 3.1: The *skip* argument.\n\n New in version 3.2: The *nosigint* argument. Previously, a SIGINT\n handler was never set by Pdb.\n\n run(statement, globals=None, locals=None)\n runeval(expression, globals=None, locals=None)\n runcall(function, *args, **kwds)\n set_trace()\n\n See the documentation for the functions explained above.\n\n\nDebugger Commands\n=================\n\nThe commands recognized by the debugger are listed below. Most\ncommands can be abbreviated to one or two letters as indicated; e.g.\n``h(elp)`` means that either ``h`` or ``help`` can be used to enter\nthe help command (but not ``he`` or ``hel``, nor ``H`` or ``Help`` or\n``HELP``). Arguments to commands must be separated by whitespace\n(spaces or tabs). Optional arguments are enclosed in square brackets\n(``[]``) in the command syntax; the square brackets must not be typed.\nAlternatives in the command syntax are separated by a vertical bar\n(``|``).\n\nEntering a blank line repeats the last command entered. Exception: if\nthe last command was a ``list`` command, the next 11 lines are listed.\n\nCommands that the debugger doesn\'t recognize are assumed to be Python\nstatements and are executed in the context of the program being\ndebugged. Python statements can also be prefixed with an exclamation\npoint (``!``). This is a powerful way to inspect the program being\ndebugged; it is even possible to change a variable or call a function.\nWhen an exception occurs in such a statement, the exception name is\nprinted but the debugger\'s state is not changed.\n\nThe debugger supports *aliases*. Aliases can have parameters which\nallows one a certain level of adaptability to the context under\nexamination.\n\nMultiple commands may be entered on a single line, separated by\n``;;``. (A single ``;`` is not used as it is the separator for\nmultiple commands in a line that is passed to the Python parser.) No\nintelligence is applied to separating the commands; the input is split\nat the first ``;;`` pair, even if it is in the middle of a quoted\nstring.\n\nIf a file ``.pdbrc`` exists in the user\'s home directory or in the\ncurrent directory, it is read in and executed as if it had been typed\nat the debugger prompt. This is particularly useful for aliases. If\nboth files exist, the one in the home directory is read first and\naliases defined there can be overridden by the local file.\n\nChanged in version 3.2: ``.pdbrc`` can now contain commands that\ncontinue debugging, such as ``continue`` or ``next``. Previously,\nthese commands had no effect.\n\nh(elp) [command]\n\n Without argument, print the list of available commands. With a\n *command* as argument, print help about that command. ``help pdb``\n displays the full documentation (the docstring of the ``pdb``\n module). Since the *command* argument must be an identifier,\n ``help exec`` must be entered to get help on the ``!`` command.\n\nw(here)\n\n Print a stack trace, with the most recent frame at the bottom. 
An\n arrow indicates the current frame, which determines the context of\n most commands.\n\nd(own) [count]\n\n Move the current frame *count* (default one) levels down in the\n stack trace (to a newer frame).\n\nu(p) [count]\n\n Move the current frame *count* (default one) levels up in the stack\n trace (to an older frame).\n\nb(reak) [([filename:]lineno | function) [, condition]]\n\n With a *lineno* argument, set a break there in the current file.\n With a *function* argument, set a break at the first executable\n statement within that function. The line number may be prefixed\n with a filename and a colon, to specify a breakpoint in another\n file (probably one that hasn\'t been loaded yet). The file is\n searched on ``sys.path``. Note that each breakpoint is assigned a\n number to which all the other breakpoint commands refer.\n\n If a second argument is present, it is an expression which must\n evaluate to true before the breakpoint is honored.\n\n Without argument, list all breaks, including for each breakpoint,\n the number of times that breakpoint has been hit, the current\n ignore count, and the associated condition if any.\n\ntbreak [([filename:]lineno | function) [, condition]]\n\n Temporary breakpoint, which is removed automatically when it is\n first hit. The arguments are the same as for ``break``.\n\ncl(ear) [filename:lineno | bpnumber [bpnumber ...]]\n\n With a *filename:lineno* argument, clear all the breakpoints at\n this line. With a space separated list of breakpoint numbers, clear\n those breakpoints. Without argument, clear all breaks (but first\n ask confirmation).\n\ndisable [bpnumber [bpnumber ...]]\n\n Disable the breakpoints given as a space separated list of\n breakpoint numbers. Disabling a breakpoint means it cannot cause\n the program to stop execution, but unlike clearing a breakpoint, it\n remains in the list of breakpoints and can be (re-)enabled.\n\nenable [bpnumber [bpnumber ...]]\n\n Enable the breakpoints specified.\n\nignore bpnumber [count]\n\n Set the ignore count for the given breakpoint number. If count is\n omitted, the ignore count is set to 0. A breakpoint becomes active\n when the ignore count is zero. When non-zero, the count is\n decremented each time the breakpoint is reached and the breakpoint\n is not disabled and any associated condition evaluates to true.\n\ncondition bpnumber [condition]\n\n Set a new *condition* for the breakpoint, an expression which must\n evaluate to true before the breakpoint is honored. If *condition*\n is absent, any existing condition is removed; i.e., the breakpoint\n is made unconditional.\n\ncommands [bpnumber]\n\n Specify a list of commands for breakpoint number *bpnumber*. The\n commands themselves appear on the following lines. Type a line\n containing just ``end`` to terminate the commands. An example:\n\n (Pdb) commands 1\n (com) print some_variable\n (com) end\n (Pdb)\n\n To remove all commands from a breakpoint, type commands and follow\n it immediately with ``end``; that is, give no commands.\n\n With no *bpnumber* argument, commands refers to the last breakpoint\n set.\n\n You can use breakpoint commands to start your program up again.\n Simply use the continue command, or step, or any other command that\n resumes execution.\n\n Specifying any command resuming execution (currently continue,\n step, next, return, jump, quit and their abbreviations) terminates\n the command list (as if that command was immediately followed by\n end). 
This is because any time you resume execution (even with a\n simple next or step), you may encounter another breakpoint--which\n could have its own command list, leading to ambiguities about which\n list to execute.\n\n If you use the \'silent\' command in the command list, the usual\n message about stopping at a breakpoint is not printed. This may be\n desirable for breakpoints that are to print a specific message and\n then continue. If none of the other commands print anything, you\n see no sign that the breakpoint was reached.\n\ns(tep)\n\n Execute the current line, stop at the first possible occasion\n (either in a function that is called or on the next line in the\n current function).\n\nn(ext)\n\n Continue execution until the next line in the current function is\n reached or it returns. (The difference between ``next`` and\n ``step`` is that ``step`` stops inside a called function, while\n ``next`` executes called functions at (nearly) full speed, only\n stopping at the next line in the current function.)\n\nunt(il) [lineno]\n\n Without argument, continue execution until the line with a number\n greater than the current one is reached.\n\n With a line number, continue execution until a line with a number\n greater or equal to that is reached. In both cases, also stop when\n the current frame returns.\n\n Changed in version 3.2: Allow giving an explicit line number.\n\nr(eturn)\n\n Continue execution until the current function returns.\n\nc(ont(inue))\n\n Continue execution, only stop when a breakpoint is encountered.\n\nj(ump) lineno\n\n Set the next line that will be executed. Only available in the\n bottom-most frame. This lets you jump back and execute code again,\n or jump forward to skip code that you don\'t want to run.\n\n It should be noted that not all jumps are allowed -- for instance\n it is not possible to jump into the middle of a ``for`` loop or out\n of a ``finally`` clause.\n\nl(ist) [first[, last]]\n\n List source code for the current file. Without arguments, list 11\n lines around the current line or continue the previous listing.\n With ``.`` as argument, list 11 lines around the current line.\n With one argument, list 11 lines around at that line. With two\n arguments, list the given range; if the second argument is less\n than the first, it is interpreted as a count.\n\n The current line in the current frame is indicated by ``->``. 
If\n an exception is being debugged, the line where the exception was\n originally raised or propagated is indicated by ``>>``, if it\n differs from the current line.\n\n New in version 3.2: The ``>>`` marker.\n\nll | longlist\n\n List all source code for the current function or frame.\n Interesting lines are marked as for ``list``.\n\n New in version 3.2.\n\na(rgs)\n\n Print the argument list of the current function.\n\np(rint) expression\n\n Evaluate the *expression* in the current context and print its\n value.\n\npp expression\n\n Like the ``print`` command, except the value of the expression is\n pretty-printed using the ``pprint`` module.\n\nwhatis expression\n\n Print the type of the *expression*.\n\nsource expression\n\n Try to get source code for the given object and display it.\n\n New in version 3.2.\n\ndisplay [expression]\n\n Display the value of the expression if it changed, each time\n execution stops in the current frame.\n\n Without expression, list all display expressions for the current\n frame.\n\n New in version 3.2.\n\nundisplay [expression]\n\n Do not display the expression any more in the current frame.\n Without expression, clear all display expressions for the current\n frame.\n\n New in version 3.2.\n\ninteract\n\n Start an interative interpreter (using the ``code`` module) whose\n global namespace contains all the (global and local) names found in\n the current scope.\n\n New in version 3.2.\n\nalias [name [command]]\n\n Create an alias called *name* that executes *command*. The command\n must *not* be enclosed in quotes. Replaceable parameters can be\n indicated by ``%1``, ``%2``, and so on, while ``%*`` is replaced by\n all the parameters. If no command is given, the current alias for\n *name* is shown. If no arguments are given, all aliases are listed.\n\n Aliases may be nested and can contain anything that can be legally\n typed at the pdb prompt. Note that internal pdb commands *can* be\n overridden by aliases. Such a command is then hidden until the\n alias is removed. Aliasing is recursively applied to the first\n word of the command line; all other words in the line are left\n alone.\n\n As an example, here are two useful aliases (especially when placed\n in the ``.pdbrc`` file):\n\n # Print instance variables (usage "pi classInst")\n alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k])\n # Print instance variables in self\n alias ps pi self\n\nunalias name\n\n Delete the specified alias.\n\n! statement\n\n Execute the (one-line) *statement* in the context of the current\n stack frame. The exclamation point can be omitted unless the first\n word of the statement resembles a debugger command. To set a\n global variable, you can prefix the assignment command with a\n ``global`` statement on the same line, e.g.:\n\n (Pdb) global list_options; list_options = [\'-l\']\n (Pdb)\n\nrun [args ...]\nrestart [args ...]\n\n Restart the debugged Python program. If an argument is supplied,\n it is split with ``shlex`` and the result is used as the new\n ``sys.argv``. History, breakpoints, actions and debugger options\n are preserved. ``restart`` is an alias for ``run``.\n\nq(uit)\n\n Quit from the debugger. 
The program being executed is aborted.\n\n-[ Footnotes ]-\n\n[1] Whether a frame is considered to originate in a certain module is\n determined by the ``__name__`` in the frame globals.\n', 'del': '\nThe ``del`` statement\n*********************\n\n del_stmt ::= "del" target_list\n\nDeletion is recursively defined very similar to the way assignment is\ndefined. Rather than spelling it out in full details, here are some\nhints.\n\nDeletion of a target list recursively deletes each target, from left\nto right.\n\nDeletion of a name removes the binding of that name from the local or\nglobal namespace, depending on whether the name occurs in a ``global``\nstatement in the same code block. If the name is unbound, a\n``NameError`` exception will be raised.\n\nDeletion of attribute references, subscriptions and slicings is passed\nto the primary object involved; deletion of a slicing is in general\nequivalent to assignment of an empty slice of the right type (but even\nthis is determined by the sliced object).\n\nChanged in version 3.2: Previously it was illegal to delete a name\nfrom the local namespace if it occurs as a free variable in a nested\nblock.\n', 'dict': '\nDictionary displays\n*******************\n\nA dictionary display is a possibly empty series of key/datum pairs\nenclosed in curly braces:\n\n dict_display ::= "{" [key_datum_list | dict_comprehension] "}"\n key_datum_list ::= key_datum ("," key_datum)* [","]\n key_datum ::= expression ":" expression\n dict_comprehension ::= expression ":" expression comp_for\n\nA dictionary display yields a new dictionary object.\n\nIf a comma-separated sequence of key/datum pairs is given, they are\nevaluated from left to right to define the entries of the dictionary:\neach key object is used as a key into the dictionary to store the\ncorresponding datum. This means that you can specify the same key\nmultiple times in the key/datum list, and the final dictionary\'s value\nfor that key will be the last one given.\n\nA dict comprehension, in contrast to list and set comprehensions,\nneeds two expressions separated with a colon followed by the usual\n"for" and "if" clauses. When the comprehension is run, the resulting\nkey and value elements are inserted in the new dictionary in the order\nthey are produced.\n\nRestrictions on the types of the key values are listed earlier in\nsection *The standard type hierarchy*. (To summarize, the key type\nshould be *hashable*, which excludes all mutable objects.) Clashes\nbetween duplicate keys are not detected; the last datum (textually\nrightmost in the display) stored for a given key value prevails.\n', @@ -34,14 +34,14 @@ 'exprlists': '\nExpression lists\n****************\n\n expression_list ::= expression ( "," expression )* [","]\n\nAn expression list containing at least one comma yields a tuple. The\nlength of the tuple is the number of expressions in the list. The\nexpressions are evaluated from left to right.\n\nThe trailing comma is required only to create a single tuple (a.k.a. a\n*singleton*); it is optional in all other cases. A single expression\nwithout a trailing comma doesn\'t create a tuple, but rather yields the\nvalue of that expression. 
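A quick sketch of those rules:

   t = 1, 2, 3            # an expression list with commas yields a tuple
   single = 'spam',       # the trailing comma makes a one-item tuple
   plain = ('spam')       # no comma: just the parenthesized string
   print(type(t), type(single), type(plain))
   # <class 'tuple'> <class 'tuple'> <class 'str'>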
(To create an empty tuple, use an empty pair\nof parentheses: ``()``.)\n', 'floating': '\nFloating point literals\n***********************\n\nFloating point literals are described by the following lexical\ndefinitions:\n\n floatnumber ::= pointfloat | exponentfloat\n pointfloat ::= [intpart] fraction | intpart "."\n exponentfloat ::= (intpart | pointfloat) exponent\n intpart ::= digit+\n fraction ::= "." digit+\n exponent ::= ("e" | "E") ["+" | "-"] digit+\n\nNote that the integer and exponent parts are always interpreted using\nradix 10. For example, ``077e010`` is legal, and denotes the same\nnumber as ``77e10``. The allowed range of floating point literals is\nimplementation-dependent. Some examples of floating point literals:\n\n 3.14 10. .001 1e100 3.14e-10 0e0\n\nNote that numeric literals do not include a sign; a phrase like ``-1``\nis actually an expression composed of the unary operator ``-`` and the\nliteral ``1``.\n', 'for': '\nThe ``for`` statement\n*********************\n\nThe ``for`` statement is used to iterate over the elements of a\nsequence (such as a string, tuple or list) or other iterable object:\n\n for_stmt ::= "for" target_list "in" expression_list ":" suite\n ["else" ":" suite]\n\nThe expression list is evaluated once; it should yield an iterable\nobject. An iterator is created for the result of the\n``expression_list``. The suite is then executed once for each item\nprovided by the iterator, in the order of ascending indices. Each\nitem in turn is assigned to the target list using the standard rules\nfor assignments (see *Assignment statements*), and then the suite is\nexecuted. When the items are exhausted (which is immediately when the\nsequence is empty or an iterator raises a ``StopIteration``\nexception), the suite in the ``else`` clause, if present, is executed,\nand the loop terminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ncontinues with the next item, or with the ``else`` clause if there was\nno next item.\n\nThe suite may assign to the variable(s) in the target list; this does\nnot affect the next item assigned to it.\n\nNames in the target list are not deleted when the loop is finished,\nbut if the sequence is empty, it will not have been assigned to at all\nby the loop. Hint: the built-in function ``range()`` returns an\niterator of integers suitable to emulate the effect of Pascal\'s ``for\ni := a to b do``; e.g., ``list(range(3))`` returns the list ``[0, 1,\n2]``.\n\nNote: There is a subtlety when the sequence is being modified by the loop\n (this can only occur for mutable sequences, i.e. lists). An\n internal counter is used to keep track of which item is used next,\n and this is incremented on each iteration. When this counter has\n reached the length of the sequence the loop terminates. This means\n that if the suite deletes the current (or a previous) item from the\n sequence, the next item will be skipped (since it gets the index of\n the current item which has already been treated). 
Likewise, if the\n suite inserts an item in the sequence before the current item, the\n current item will be treated again the next time through the loop.\n This can lead to nasty bugs that can be avoided by making a\n temporary copy using a slice of the whole sequence, e.g.,\n\n for x in a[:]:\n if x < 0: a.remove(x)\n', - 'formatstrings': '\nFormat String Syntax\n********************\n\nThe ``str.format()`` method and the ``Formatter`` class share the same\nsyntax for format strings (although in the case of ``Formatter``,\nsubclasses can define their own format string syntax).\n\nFormat strings contain "replacement fields" surrounded by curly braces\n``{}``. Anything that is not contained in braces is considered literal\ntext, which is copied unchanged to the output. If you need to include\na brace character in the literal text, it can be escaped by doubling:\n``{{`` and ``}}``.\n\nThe grammar for a replacement field is as follows:\n\n replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}"\n field_name ::= arg_name ("." attribute_name | "[" element_index "]")*\n arg_name ::= [identifier | integer]\n attribute_name ::= identifier\n element_index ::= integer | index_string\n index_string ::= +\n conversion ::= "r" | "s" | "a"\n format_spec ::= \n\nIn less formal terms, the replacement field can start with a\n*field_name* that specifies the object whose value is to be formatted\nand inserted into the output instead of the replacement field. The\n*field_name* is optionally followed by a *conversion* field, which is\npreceded by an exclamation point ``\'!\'``, and a *format_spec*, which\nis preceded by a colon ``\':\'``. These specify a non-default format\nfor the replacement value.\n\nSee also the *Format Specification Mini-Language* section.\n\nThe *field_name* itself begins with an *arg_name* that is either a\nnumber or a keyword. If it\'s a number, it refers to a positional\nargument, and if it\'s a keyword, it refers to a named keyword\nargument. If the numerical arg_names in a format string are 0, 1, 2,\n... in sequence, they can all be omitted (not just some) and the\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\nBecause *arg_name* is not quote-delimited, it is not possible to\nspecify arbitrary dictionary keys (e.g., the strings ``\'10\'`` or\n``\':-]\'``) within a format string. The *arg_name* can be followed by\nany number of index or attribute expressions. An expression of the\nform ``\'.name\'`` selects the named attribute using ``getattr()``,\nwhile an expression of the form ``\'[index]\'`` does an index lookup\nusing ``__getitem__()``.\n\nChanged in version 3.1: The positional argument specifiers can be\nomitted, so ``\'{} {}\'`` is equivalent to ``\'{0} {1}\'``.\n\nSome simple format string examples:\n\n "First, thou shalt count to {0}" # References first positional argument\n "Bring me a {}" # Implicitly references the first positional argument\n "From {} to {}" # Same as "From {0} to {1}"\n "My quest is {name}" # References keyword argument \'name\'\n "Weight in tons {0.weight}" # \'weight\' attribute of first positional arg\n "Units destroyed: {players[0]}" # First element of keyword argument \'players\'.\n\nThe *conversion* field causes a type coercion before formatting.\nNormally, the job of formatting a value is done by the\n``__format__()`` method of the value itself. However, in some cases\nit is desirable to force a type to be formatted as a string,\noverriding its own definition of formatting. 
By converting the value\nto a string before calling ``__format__()``, the normal formatting\nlogic is bypassed.\n\nThree conversion flags are currently supported: ``\'!s\'`` which calls\n``str()`` on the value, ``\'!r\'`` which calls ``repr()`` and ``\'!a\'``\nwhich calls ``ascii()``.\n\nSome examples:\n\n "Harold\'s a clever {0!s}" # Calls str() on the argument first\n "Bring out the holy {name!r}" # Calls repr() on the argument first\n "More {!a}" # Calls ascii() on the argument first\n\nThe *format_spec* field contains a specification of how the value\nshould be presented, including such details as field width, alignment,\npadding, decimal precision and so on. Each value type can define its\nown "formatting mini-language" or interpretation of the *format_spec*.\n\nMost built-in types support a common formatting mini-language, which\nis described in the next section.\n\nA *format_spec* field can also include nested replacement fields\nwithin it. These nested replacement fields can contain only a field\nname; conversion flags and format specifications are not allowed. The\nreplacement fields within the format_spec are substituted before the\n*format_spec* string is interpreted. This allows the formatting of a\nvalue to be dynamically specified.\n\nSee the *Format examples* section for some examples.\n\n\nFormat Specification Mini-Language\n==================================\n\n"Format specifications" are used within replacement fields contained\nwithin a format string to define how individual values are presented\n(see *Format String Syntax*). They can also be passed directly to the\nbuilt-in ``format()`` function. Each formattable type may define how\nthe format specification is to be interpreted.\n\nMost built-in types implement the following options for format\nspecifications, although some of the formatting options are only\nsupported by the numeric types.\n\nA general convention is that an empty format string (``""``) produces\nthe same result as if you had called ``str()`` on the value. A non-\nempty format string typically modifies the result.\n\nThe general form of a *standard format specifier* is:\n\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\n fill ::= \n align ::= "<" | ">" | "=" | "^"\n sign ::= "+" | "-" | " "\n width ::= integer\n precision ::= integer\n type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n\nThe *fill* character can be any character other than \'{\' or \'}\'. The\npresence of a fill character is signaled by the character following\nit, which must be one of the alignment options. If the second\ncharacter of *format_spec* is not a valid alignment option, then it is\nassumed that both the fill character and the alignment option are\nabsent.\n\nThe meaning of the various alignment options is as follows:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'<\'`` | Forces the field to be left-aligned within the available |\n | | space (this is the default for most objects). |\n +-----------+------------------------------------------------------------+\n | ``\'>\'`` | Forces the field to be right-aligned within the available |\n | | space (this is the default for numbers). |\n +-----------+------------------------------------------------------------+\n | ``\'=\'`` | Forces the padding to be placed after the sign (if any) |\n | | but before the digits. 
This is used for printing fields |\n | | in the form \'+000000120\'. This alignment option is only |\n | | valid for numeric types. |\n +-----------+------------------------------------------------------------+\n | ``\'^\'`` | Forces the field to be centered within the available |\n | | space. |\n +-----------+------------------------------------------------------------+\n\nNote that unless a minimum field width is defined, the field width\nwill always be the same size as the data to fill it, so that the\nalignment option has no meaning in this case.\n\nThe *sign* option is only valid for number types, and can be one of\nthe following:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'+\'`` | indicates that a sign should be used for both positive as |\n | | well as negative numbers. |\n +-----------+------------------------------------------------------------+\n | ``\'-\'`` | indicates that a sign should be used only for negative |\n | | numbers (this is the default behavior). |\n +-----------+------------------------------------------------------------+\n | space | indicates that a leading space should be used on positive |\n | | numbers, and a minus sign on negative numbers. |\n +-----------+------------------------------------------------------------+\n\nThe ``\'#\'`` option causes the "alternate form" to be used for the\nconversion. The alternate form is defined differently for different\ntypes. This option is only valid for integer, float, complex and\nDecimal types. For integers, when binary, octal, or hexadecimal output\nis used, this option adds the prefix respective ``\'0b\'``, ``\'0o\'``, or\n``\'0x\'`` to the output value. For floats, complex and Decimal the\nalternate form causes the result of the conversion to always contain a\ndecimal-point character, even if no digits follow it. Normally, a\ndecimal-point character appears in the result of these conversions\nonly if a digit follows it. In addition, for ``\'g\'`` and ``\'G\'``\nconversions, trailing zeros are not removed from the result.\n\nThe ``\',\'`` option signals the use of a comma for a thousands\nseparator. For a locale aware separator, use the ``\'n\'`` integer\npresentation type instead.\n\nChanged in version 3.1: Added the ``\',\'`` option (see also **PEP\n378**).\n\n*width* is a decimal integer defining the minimum field width. If not\nspecified, then the field width will be determined by the content.\n\nPreceding the *width* field by a zero (``\'0\'``) character enables\nsign-aware zero-padding for numeric types. 
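As an illustrative aside (standard behaviour, not taken from this diff), the sign-aware zero-padding can be checked interactively:

   >>> format(-3.14, '08.2f')     # a '0' before the width enables zero-padding
   '-0003.14'
   >>> format(-3.14, '0=8.2f')    # the same result, spelled as fill '0' with '=' alignment
   '-0003.14'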
This is equivalent to a\n*fill* character of ``\'0\'`` with an *alignment* type of ``\'=\'``.\n\nThe *precision* is a decimal number indicating how many digits should\nbe displayed after the decimal point for a floating point value\nformatted with ``\'f\'`` and ``\'F\'``, or before and after the decimal\npoint for a floating point value formatted with ``\'g\'`` or ``\'G\'``.\nFor non-number types the field indicates the maximum field size - in\nother words, how many characters will be used from the field content.\nThe *precision* is not allowed for integer values.\n\nFinally, the *type* determines how the data should be presented.\n\nThe available string presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'s\'`` | String format. This is the default type for strings and |\n | | may be omitted. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'s\'``. |\n +-----------+------------------------------------------------------------+\n\nThe available integer presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'b\'`` | Binary format. Outputs the number in base 2. |\n +-----------+------------------------------------------------------------+\n | ``\'c\'`` | Character. Converts the integer to the corresponding |\n | | unicode character before printing. |\n +-----------+------------------------------------------------------------+\n | ``\'d\'`` | Decimal Integer. Outputs the number in base 10. |\n +-----------+------------------------------------------------------------+\n | ``\'o\'`` | Octal format. Outputs the number in base 8. |\n +-----------+------------------------------------------------------------+\n | ``\'x\'`` | Hex format. Outputs the number in base 16, using lower- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'X\'`` | Hex format. Outputs the number in base 16, using upper- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'d\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'d\'``. |\n +-----------+------------------------------------------------------------+\n\nIn addition to the above presentation types, integers can be formatted\nwith the floating point presentation types listed below (except\n``\'n\'`` and None). When doing so, ``float()`` is used to convert the\ninteger to a floating point number before formatting.\n\nThe available presentation types for floating point and decimal values\nare:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'e\'`` | Exponent notation. Prints the number in scientific |\n | | notation using the letter \'e\' to indicate the exponent. |\n +-----------+------------------------------------------------------------+\n | ``\'E\'`` | Exponent notation. 
Same as ``\'e\'`` except it uses an upper |\n | | case \'E\' as the separator character. |\n +-----------+------------------------------------------------------------+\n | ``\'f\'`` | Fixed point. Displays the number as a fixed-point number. |\n +-----------+------------------------------------------------------------+\n | ``\'F\'`` | Fixed point. Same as ``\'f\'``, but converts ``nan`` to |\n | | ``NAN`` and ``inf`` to ``INF``. |\n +-----------+------------------------------------------------------------+\n | ``\'g\'`` | General format. For a given precision ``p >= 1``, this |\n | | rounds the number to ``p`` significant digits and then |\n | | formats the result in either fixed-point format or in |\n | | scientific notation, depending on its magnitude. The |\n | | precise rules are as follows: suppose that the result |\n | | formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1`` would have exponent ``exp``. Then if ``-4 <= exp |\n | | < p``, the number is formatted with presentation type |\n | | ``\'f\'`` and precision ``p-1-exp``. Otherwise, the number |\n | | is formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1``. In both cases insignificant trailing zeros are |\n | | removed from the significand, and the decimal point is |\n | | also removed if there are no remaining digits following |\n | | it. Positive and negative infinity, positive and negative |\n | | zero, and nans, are formatted as ``inf``, ``-inf``, ``0``, |\n | | ``-0`` and ``nan`` respectively, regardless of the |\n | | precision. A precision of ``0`` is treated as equivalent |\n | | to a precision of ``1``. |\n +-----------+------------------------------------------------------------+\n | ``\'G\'`` | General format. Same as ``\'g\'`` except switches to ``\'E\'`` |\n | | if the number gets too large. The representations of |\n | | infinity and NaN are uppercased, too. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'g\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | ``\'%\'`` | Percentage. Multiplies the number by 100 and displays in |\n | | fixed (``\'f\'``) format, followed by a percent sign. |\n +-----------+------------------------------------------------------------+\n | None | Similar to ``\'g\'``, except with at least one digit past |\n | | the decimal point and a default precision of 12. This is |\n | | intended to match ``str()``, except you can add the other |\n | | format modifiers. |\n +-----------+------------------------------------------------------------+\n\n\nFormat examples\n===============\n\nThis section contains examples of the new format syntax and comparison\nwith the old ``%``-formatting.\n\nIn most of the cases the syntax is similar to the old\n``%``-formatting, with the addition of the ``{}`` and with ``:`` used\ninstead of ``%``. 
For example, ``\'%03.2f\'`` can be translated to\n``\'{:03.2f}\'``.\n\nThe new format syntax also supports new and different options, shown\nin the follow examples.\n\nAccessing arguments by position:\n\n >>> \'{0}, {1}, {2}\'.format(\'a\', \'b\', \'c\')\n \'a, b, c\'\n >>> \'{}, {}, {}\'.format(\'a\', \'b\', \'c\') # 3.1+ only\n \'a, b, c\'\n >>> \'{2}, {1}, {0}\'.format(\'a\', \'b\', \'c\')\n \'c, b, a\'\n >>> \'{2}, {1}, {0}\'.format(*\'abc\') # unpacking argument sequence\n \'c, b, a\'\n >>> \'{0}{1}{0}\'.format(\'abra\', \'cad\') # arguments\' indices can be repeated\n \'abracadabra\'\n\nAccessing arguments by name:\n\n >>> \'Coordinates: {latitude}, {longitude}\'.format(latitude=\'37.24N\', longitude=\'-115.81W\')\n \'Coordinates: 37.24N, -115.81W\'\n >>> coord = {\'latitude\': \'37.24N\', \'longitude\': \'-115.81W\'}\n >>> \'Coordinates: {latitude}, {longitude}\'.format(**coord)\n \'Coordinates: 37.24N, -115.81W\'\n\nAccessing arguments\' attributes:\n\n >>> c = 3-5j\n >>> (\'The complex number {0} is formed from the real part {0.real} \'\n ... \'and the imaginary part {0.imag}.\').format(c)\n \'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\'\n >>> class Point:\n ... def __init__(self, x, y):\n ... self.x, self.y = x, y\n ... def __str__(self):\n ... return \'Point({self.x}, {self.y})\'.format(self=self)\n ...\n >>> str(Point(4, 2))\n \'Point(4, 2)\'\n\nAccessing arguments\' items:\n\n >>> coord = (3, 5)\n >>> \'X: {0[0]}; Y: {0[1]}\'.format(coord)\n \'X: 3; Y: 5\'\n\nReplacing ``%s`` and ``%r``:\n\n >>> "repr() shows quotes: {!r}; str() doesn\'t: {!s}".format(\'test1\', \'test2\')\n "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n\nAligning the text and specifying a width:\n\n >>> \'{:<30}\'.format(\'left aligned\')\n \'left aligned \'\n >>> \'{:>30}\'.format(\'right aligned\')\n \' right aligned\'\n >>> \'{:^30}\'.format(\'centered\')\n \' centered \'\n >>> \'{:*^30}\'.format(\'centered\') # use \'*\' as a fill char\n \'***********centered***********\'\n\nReplacing ``%+f``, ``%-f``, and ``% f`` and specifying a sign:\n\n >>> \'{:+f}; {:+f}\'.format(3.14, -3.14) # show it always\n \'+3.140000; -3.140000\'\n >>> \'{: f}; {: f}\'.format(3.14, -3.14) # show a space for positive numbers\n \' 3.140000; -3.140000\'\n >>> \'{:-f}; {:-f}\'.format(3.14, -3.14) # show only the minus -- same as \'{:f}; {:f}\'\n \'3.140000; -3.140000\'\n\nReplacing ``%x`` and ``%o`` and converting the value to different\nbases:\n\n >>> # format also supports binary numbers\n >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42)\n \'int: 42; hex: 2a; oct: 52; bin: 101010\'\n >>> # with 0x, 0o, or 0b as prefix:\n >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42)\n \'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\'\n\nUsing the comma as a thousands separator:\n\n >>> \'{:,}\'.format(1234567890)\n \'1,234,567,890\'\n\nExpressing a percentage:\n\n >>> points = 19\n >>> total = 22\n >>> \'Correct answers: {:.2%}\'.format(points/total)\n \'Correct answers: 86.36%\'\n\nUsing type-specific formatting:\n\n >>> import datetime\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n >>> \'{:%Y-%m-%d %H:%M:%S}\'.format(d)\n \'2010-07-04 12:15:58\'\n\nNesting arguments and more complex examples:\n\n >>> for align, text in zip(\'<^>\', [\'left\', \'center\', \'right\']):\n ... 
\'{0:{fill}{align}16}\'.format(text, fill=align, align=align)\n ...\n \'left<<<<<<<<<<<<\'\n \'^^^^^center^^^^^\'\n \'>>>>>>>>>>>right\'\n >>>\n >>> octets = [192, 168, 0, 1]\n >>> \'{:02X}{:02X}{:02X}{:02X}\'.format(*octets)\n \'C0A80001\'\n >>> int(_, 16)\n 3232235521\n >>>\n >>> width = 5\n >>> for num in range(5,12): #doctest: +NORMALIZE_WHITESPACE\n ... for base in \'dXob\':\n ... print(\'{0:{width}{base}}\'.format(num, base=base, width=width), end=\' \')\n ... print()\n ...\n 5 5 5 101\n 6 6 6 110\n 7 7 7 111\n 8 8 10 1000\n 9 9 11 1001\n 10 A 12 1010\n 11 B 13 1011\n', - 'function': '\nFunction definitions\n********************\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n funcdef ::= [decorators] "def" funcname "(" [parameter_list] ")" ["->" expression] ":" suite\n decorators ::= decorator+\n decorator ::= "@" dotted_name ["(" [parameter_list [","]] ")"] NEWLINE\n dotted_name ::= identifier ("." identifier)*\n parameter_list ::= (defparameter ",")*\n ( "*" [parameter] ("," defparameter)* ["," "**" parameter]\n | "**" parameter\n | defparameter [","] )\n parameter ::= identifier [":" expression]\n defparameter ::= parameter ["=" expression]\n funcname ::= identifier\n\nA function definition is an executable statement. Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function). This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition. The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object. Multiple decorators are applied in\nnested fashion. For example, the following code\n\n @f1(arg)\n @f2\n def func(): pass\n\nis equivalent to\n\n def func(): pass\n func = f1(arg)(f2(func))\n\nWhen one or more *parameters* have the form *parameter* ``=``\n*expression*, the function is said to have "default parameter values."\nFor a parameter with a default value, the corresponding *argument* may\nbe omitted from a call, in which case the parameter\'s default value is\nsubstituted. If a parameter has a default value, all following\nparameters up until the "``*``" must also have a default value ---\nthis is a syntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated when the function definition\nis executed.** This means that the expression is evaluated once, when\nthe function is defined, and that the same "pre-computed" value is\nused for each call. This is especially important to understand when a\ndefault parameter is a mutable object, such as a list or a dictionary:\nif the function modifies the object (e.g. by appending an item to a\nlist), the default value is in effect modified. This is generally not\nwhat was intended. 
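For illustration, a short session showing the shared mutable default just described (the function and argument names here are invented, not from the changeset):

   >>> def register(name, seen=[]):   # the default list is created only once
   ...     seen.append(name)
   ...     return seen
   ...
   >>> register('a')
   ['a']
   >>> register('b')                  # the same list object is reused across calls
   ['a', 'b']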
A way around this is to use ``None`` as the\ndefault, and explicitly test for it in the body of the function, e.g.:\n\n def whats_on_the_telly(penguin=None):\n if penguin is None:\n penguin = []\n penguin.append("property of the zoo")\n return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values. If the form\n"``*identifier``" is present, it is initialized to a tuple receiving\nany excess positional parameters, defaulting to the empty tuple. If\nthe form "``**identifier``" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary. Parameters after "``*``" or "``*identifier``" are\nkeyword-only parameters and may only be passed used keyword arguments.\n\nParameters may have annotations of the form "``: expression``"\nfollowing the parameter name. Any parameter may have an annotation\neven those of the form ``*identifier`` or ``**identifier``. Functions\nmay have "return" annotation of the form "``-> expression``" after the\nparameter list. These annotations can be any valid Python expression\nand are evaluated when the function definition is executed.\nAnnotations may be evaluated in a different order than they appear in\nthe source code. The presence of annotations does not change the\nsemantics of a function. The annotation values are available as\nvalues of a dictionary keyed by the parameters\' names in the\n``__annotations__`` attribute of the function object.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions. This uses lambda forms,\ndescribed in section *Lambdas*. Note that the lambda form is merely a\nshorthand for a simplified function definition; a function defined in\na "``def``" statement can be passed around or assigned to another name\njust like a function defined by a lambda form. The "``def``" form is\nactually more powerful since it allows the execution of multiple\nstatements and annotations.\n\n**Programmer\'s note:** Functions are first-class objects. A "``def``"\nform executed inside a function definition defines a local function\nthat can be returned or passed around. Free variables used in the\nnested function can access the local variables of the function\ncontaining the def. See section *Naming and binding* for details.\n\nSee also:\n\n **PEP 3107** - Function Annotations\n The original specification for function annotations.\n', + 'formatstrings': '\nFormat String Syntax\n********************\n\nThe ``str.format()`` method and the ``Formatter`` class share the same\nsyntax for format strings (although in the case of ``Formatter``,\nsubclasses can define their own format string syntax).\n\nFormat strings contain "replacement fields" surrounded by curly braces\n``{}``. Anything that is not contained in braces is considered literal\ntext, which is copied unchanged to the output. If you need to include\na brace character in the literal text, it can be escaped by doubling:\n``{{`` and ``}}``.\n\nThe grammar for a replacement field is as follows:\n\n replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}"\n field_name ::= arg_name ("." 
attribute_name | "[" element_index "]")*\n arg_name ::= [identifier | integer]\n attribute_name ::= identifier\n element_index ::= integer | index_string\n index_string ::= +\n conversion ::= "r" | "s" | "a"\n format_spec ::= \n\nIn less formal terms, the replacement field can start with a\n*field_name* that specifies the object whose value is to be formatted\nand inserted into the output instead of the replacement field. The\n*field_name* is optionally followed by a *conversion* field, which is\npreceded by an exclamation point ``\'!\'``, and a *format_spec*, which\nis preceded by a colon ``\':\'``. These specify a non-default format\nfor the replacement value.\n\nSee also the *Format Specification Mini-Language* section.\n\nThe *field_name* itself begins with an *arg_name* that is either a\nnumber or a keyword. If it\'s a number, it refers to a positional\nargument, and if it\'s a keyword, it refers to a named keyword\nargument. If the numerical arg_names in a format string are 0, 1, 2,\n... in sequence, they can all be omitted (not just some) and the\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\nBecause *arg_name* is not quote-delimited, it is not possible to\nspecify arbitrary dictionary keys (e.g., the strings ``\'10\'`` or\n``\':-]\'``) within a format string. The *arg_name* can be followed by\nany number of index or attribute expressions. An expression of the\nform ``\'.name\'`` selects the named attribute using ``getattr()``,\nwhile an expression of the form ``\'[index]\'`` does an index lookup\nusing ``__getitem__()``.\n\nChanged in version 3.1: The positional argument specifiers can be\nomitted, so ``\'{} {}\'`` is equivalent to ``\'{0} {1}\'``.\n\nSome simple format string examples:\n\n "First, thou shalt count to {0}" # References first positional argument\n "Bring me a {}" # Implicitly references the first positional argument\n "From {} to {}" # Same as "From {0} to {1}"\n "My quest is {name}" # References keyword argument \'name\'\n "Weight in tons {0.weight}" # \'weight\' attribute of first positional arg\n "Units destroyed: {players[0]}" # First element of keyword argument \'players\'.\n\nThe *conversion* field causes a type coercion before formatting.\nNormally, the job of formatting a value is done by the\n``__format__()`` method of the value itself. However, in some cases\nit is desirable to force a type to be formatted as a string,\noverriding its own definition of formatting. By converting the value\nto a string before calling ``__format__()``, the normal formatting\nlogic is bypassed.\n\nThree conversion flags are currently supported: ``\'!s\'`` which calls\n``str()`` on the value, ``\'!r\'`` which calls ``repr()`` and ``\'!a\'``\nwhich calls ``ascii()``.\n\nSome examples:\n\n "Harold\'s a clever {0!s}" # Calls str() on the argument first\n "Bring out the holy {name!r}" # Calls repr() on the argument first\n "More {!a}" # Calls ascii() on the argument first\n\nThe *format_spec* field contains a specification of how the value\nshould be presented, including such details as field width, alignment,\npadding, decimal precision and so on. Each value type can define its\nown "formatting mini-language" or interpretation of the *format_spec*.\n\nMost built-in types support a common formatting mini-language, which\nis described in the next section.\n\nA *format_spec* field can also include nested replacement fields\nwithin it. These nested replacement fields can contain only a field\nname; conversion flags and format specifications are not allowed. 
The\nreplacement fields within the format_spec are substituted before the\n*format_spec* string is interpreted. This allows the formatting of a\nvalue to be dynamically specified.\n\nSee the *Format examples* section for some examples.\n\n\nFormat Specification Mini-Language\n==================================\n\n"Format specifications" are used within replacement fields contained\nwithin a format string to define how individual values are presented\n(see *Format String Syntax*). They can also be passed directly to the\nbuilt-in ``format()`` function. Each formattable type may define how\nthe format specification is to be interpreted.\n\nMost built-in types implement the following options for format\nspecifications, although some of the formatting options are only\nsupported by the numeric types.\n\nA general convention is that an empty format string (``""``) produces\nthe same result as if you had called ``str()`` on the value. A non-\nempty format string typically modifies the result.\n\nThe general form of a *standard format specifier* is:\n\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\n fill ::= \n align ::= "<" | ">" | "=" | "^"\n sign ::= "+" | "-" | " "\n width ::= integer\n precision ::= integer\n type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n\nThe *fill* character can be any character other than \'{\' or \'}\'. The\npresence of a fill character is signaled by the character following\nit, which must be one of the alignment options. If the second\ncharacter of *format_spec* is not a valid alignment option, then it is\nassumed that both the fill character and the alignment option are\nabsent.\n\nThe meaning of the various alignment options is as follows:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'<\'`` | Forces the field to be left-aligned within the available |\n | | space (this is the default for most objects). |\n +-----------+------------------------------------------------------------+\n | ``\'>\'`` | Forces the field to be right-aligned within the available |\n | | space (this is the default for numbers). |\n +-----------+------------------------------------------------------------+\n | ``\'=\'`` | Forces the padding to be placed after the sign (if any) |\n | | but before the digits. This is used for printing fields |\n | | in the form \'+000000120\'. This alignment option is only |\n | | valid for numeric types. |\n +-----------+------------------------------------------------------------+\n | ``\'^\'`` | Forces the field to be centered within the available |\n | | space. |\n +-----------+------------------------------------------------------------+\n\nNote that unless a minimum field width is defined, the field width\nwill always be the same size as the data to fill it, so that the\nalignment option has no meaning in this case.\n\nThe *sign* option is only valid for number types, and can be one of\nthe following:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'+\'`` | indicates that a sign should be used for both positive as |\n | | well as negative numbers. 
|\n +-----------+------------------------------------------------------------+\n | ``\'-\'`` | indicates that a sign should be used only for negative |\n | | numbers (this is the default behavior). |\n +-----------+------------------------------------------------------------+\n | space | indicates that a leading space should be used on positive |\n | | numbers, and a minus sign on negative numbers. |\n +-----------+------------------------------------------------------------+\n\nThe ``\'#\'`` option causes the "alternate form" to be used for the\nconversion. The alternate form is defined differently for different\ntypes. This option is only valid for integer, float, complex and\nDecimal types. For integers, when binary, octal, or hexadecimal output\nis used, this option adds the prefix respective ``\'0b\'``, ``\'0o\'``, or\n``\'0x\'`` to the output value. For floats, complex and Decimal the\nalternate form causes the result of the conversion to always contain a\ndecimal-point character, even if no digits follow it. Normally, a\ndecimal-point character appears in the result of these conversions\nonly if a digit follows it. In addition, for ``\'g\'`` and ``\'G\'``\nconversions, trailing zeros are not removed from the result.\n\nThe ``\',\'`` option signals the use of a comma for a thousands\nseparator. For a locale aware separator, use the ``\'n\'`` integer\npresentation type instead.\n\nChanged in version 3.1: Added the ``\',\'`` option (see also **PEP\n378**).\n\n*width* is a decimal integer defining the minimum field width. If not\nspecified, then the field width will be determined by the content.\n\nPreceding the *width* field by a zero (``\'0\'``) character enables\nsign-aware zero-padding for numeric types. This is equivalent to a\n*fill* character of ``\'0\'`` with an *alignment* type of ``\'=\'``.\n\nThe *precision* is a decimal number indicating how many digits should\nbe displayed after the decimal point for a floating point value\nformatted with ``\'f\'`` and ``\'F\'``, or before and after the decimal\npoint for a floating point value formatted with ``\'g\'`` or ``\'G\'``.\nFor non-number types the field indicates the maximum field size - in\nother words, how many characters will be used from the field content.\nThe *precision* is not allowed for integer values.\n\nFinally, the *type* determines how the data should be presented.\n\nThe available string presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'s\'`` | String format. This is the default type for strings and |\n | | may be omitted. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'s\'``. |\n +-----------+------------------------------------------------------------+\n\nThe available integer presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'b\'`` | Binary format. Outputs the number in base 2. |\n +-----------+------------------------------------------------------------+\n | ``\'c\'`` | Character. Converts the integer to the corresponding |\n | | unicode character before printing. |\n +-----------+------------------------------------------------------------+\n | ``\'d\'`` | Decimal Integer. Outputs the number in base 10. 
|\n +-----------+------------------------------------------------------------+\n | ``\'o\'`` | Octal format. Outputs the number in base 8. |\n +-----------+------------------------------------------------------------+\n | ``\'x\'`` | Hex format. Outputs the number in base 16, using lower- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'X\'`` | Hex format. Outputs the number in base 16, using upper- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'d\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'d\'``. |\n +-----------+------------------------------------------------------------+\n\nIn addition to the above presentation types, integers can be formatted\nwith the floating point presentation types listed below (except\n``\'n\'`` and None). When doing so, ``float()`` is used to convert the\ninteger to a floating point number before formatting.\n\nThe available presentation types for floating point and decimal values\nare:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'e\'`` | Exponent notation. Prints the number in scientific |\n | | notation using the letter \'e\' to indicate the exponent. |\n | | The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'E\'`` | Exponent notation. Same as ``\'e\'`` except it uses an upper |\n | | case \'E\' as the separator character. |\n +-----------+------------------------------------------------------------+\n | ``\'f\'`` | Fixed point. Displays the number as a fixed-point number. |\n | | The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'F\'`` | Fixed point. Same as ``\'f\'``, but converts ``nan`` to |\n | | ``NAN`` and ``inf`` to ``INF``. |\n +-----------+------------------------------------------------------------+\n | ``\'g\'`` | General format. For a given precision ``p >= 1``, this |\n | | rounds the number to ``p`` significant digits and then |\n | | formats the result in either fixed-point format or in |\n | | scientific notation, depending on its magnitude. The |\n | | precise rules are as follows: suppose that the result |\n | | formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1`` would have exponent ``exp``. Then if ``-4 <= exp |\n | | < p``, the number is formatted with presentation type |\n | | ``\'f\'`` and precision ``p-1-exp``. Otherwise, the number |\n | | is formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1``. In both cases insignificant trailing zeros are |\n | | removed from the significand, and the decimal point is |\n | | also removed if there are no remaining digits following |\n | | it. Positive and negative infinity, positive and negative |\n | | zero, and nans, are formatted as ``inf``, ``-inf``, ``0``, |\n | | ``-0`` and ``nan`` respectively, regardless of the |\n | | precision. A precision of ``0`` is treated as equivalent |\n | | to a precision of ``1``. The default precision is ``6``. 
|\n +-----------+------------------------------------------------------------+\n | ``\'G\'`` | General format. Same as ``\'g\'`` except switches to ``\'E\'`` |\n | | if the number gets too large. The representations of |\n | | infinity and NaN are uppercased, too. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'g\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | ``\'%\'`` | Percentage. Multiplies the number by 100 and displays in |\n | | fixed (``\'f\'``) format, followed by a percent sign. |\n +-----------+------------------------------------------------------------+\n | None | Similar to ``\'g\'``, except with at least one digit past |\n | | the decimal point and a default precision of 12. This is |\n | | intended to match ``str()``, except you can add the other |\n | | format modifiers. |\n +-----------+------------------------------------------------------------+\n\n\nFormat examples\n===============\n\nThis section contains examples of the new format syntax and comparison\nwith the old ``%``-formatting.\n\nIn most of the cases the syntax is similar to the old\n``%``-formatting, with the addition of the ``{}`` and with ``:`` used\ninstead of ``%``. For example, ``\'%03.2f\'`` can be translated to\n``\'{:03.2f}\'``.\n\nThe new format syntax also supports new and different options, shown\nin the follow examples.\n\nAccessing arguments by position:\n\n >>> \'{0}, {1}, {2}\'.format(\'a\', \'b\', \'c\')\n \'a, b, c\'\n >>> \'{}, {}, {}\'.format(\'a\', \'b\', \'c\') # 3.1+ only\n \'a, b, c\'\n >>> \'{2}, {1}, {0}\'.format(\'a\', \'b\', \'c\')\n \'c, b, a\'\n >>> \'{2}, {1}, {0}\'.format(*\'abc\') # unpacking argument sequence\n \'c, b, a\'\n >>> \'{0}{1}{0}\'.format(\'abra\', \'cad\') # arguments\' indices can be repeated\n \'abracadabra\'\n\nAccessing arguments by name:\n\n >>> \'Coordinates: {latitude}, {longitude}\'.format(latitude=\'37.24N\', longitude=\'-115.81W\')\n \'Coordinates: 37.24N, -115.81W\'\n >>> coord = {\'latitude\': \'37.24N\', \'longitude\': \'-115.81W\'}\n >>> \'Coordinates: {latitude}, {longitude}\'.format(**coord)\n \'Coordinates: 37.24N, -115.81W\'\n\nAccessing arguments\' attributes:\n\n >>> c = 3-5j\n >>> (\'The complex number {0} is formed from the real part {0.real} \'\n ... \'and the imaginary part {0.imag}.\').format(c)\n \'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\'\n >>> class Point:\n ... def __init__(self, x, y):\n ... self.x, self.y = x, y\n ... def __str__(self):\n ... 
return \'Point({self.x}, {self.y})\'.format(self=self)\n ...\n >>> str(Point(4, 2))\n \'Point(4, 2)\'\n\nAccessing arguments\' items:\n\n >>> coord = (3, 5)\n >>> \'X: {0[0]}; Y: {0[1]}\'.format(coord)\n \'X: 3; Y: 5\'\n\nReplacing ``%s`` and ``%r``:\n\n >>> "repr() shows quotes: {!r}; str() doesn\'t: {!s}".format(\'test1\', \'test2\')\n "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n\nAligning the text and specifying a width:\n\n >>> \'{:<30}\'.format(\'left aligned\')\n \'left aligned \'\n >>> \'{:>30}\'.format(\'right aligned\')\n \' right aligned\'\n >>> \'{:^30}\'.format(\'centered\')\n \' centered \'\n >>> \'{:*^30}\'.format(\'centered\') # use \'*\' as a fill char\n \'***********centered***********\'\n\nReplacing ``%+f``, ``%-f``, and ``% f`` and specifying a sign:\n\n >>> \'{:+f}; {:+f}\'.format(3.14, -3.14) # show it always\n \'+3.140000; -3.140000\'\n >>> \'{: f}; {: f}\'.format(3.14, -3.14) # show a space for positive numbers\n \' 3.140000; -3.140000\'\n >>> \'{:-f}; {:-f}\'.format(3.14, -3.14) # show only the minus -- same as \'{:f}; {:f}\'\n \'3.140000; -3.140000\'\n\nReplacing ``%x`` and ``%o`` and converting the value to different\nbases:\n\n >>> # format also supports binary numbers\n >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42)\n \'int: 42; hex: 2a; oct: 52; bin: 101010\'\n >>> # with 0x, 0o, or 0b as prefix:\n >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42)\n \'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\'\n\nUsing the comma as a thousands separator:\n\n >>> \'{:,}\'.format(1234567890)\n \'1,234,567,890\'\n\nExpressing a percentage:\n\n >>> points = 19\n >>> total = 22\n >>> \'Correct answers: {:.2%}\'.format(points/total)\n \'Correct answers: 86.36%\'\n\nUsing type-specific formatting:\n\n >>> import datetime\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n >>> \'{:%Y-%m-%d %H:%M:%S}\'.format(d)\n \'2010-07-04 12:15:58\'\n\nNesting arguments and more complex examples:\n\n >>> for align, text in zip(\'<^>\', [\'left\', \'center\', \'right\']):\n ... \'{0:{fill}{align}16}\'.format(text, fill=align, align=align)\n ...\n \'left<<<<<<<<<<<<\'\n \'^^^^^center^^^^^\'\n \'>>>>>>>>>>>right\'\n >>>\n >>> octets = [192, 168, 0, 1]\n >>> \'{:02X}{:02X}{:02X}{:02X}\'.format(*octets)\n \'C0A80001\'\n >>> int(_, 16)\n 3232235521\n >>>\n >>> width = 5\n >>> for num in range(5,12): #doctest: +NORMALIZE_WHITESPACE\n ... for base in \'dXob\':\n ... print(\'{0:{width}{base}}\'.format(num, base=base, width=width), end=\' \')\n ... print()\n ...\n 5 5 5 101\n 6 6 6 110\n 7 7 7 111\n 8 8 10 1000\n 9 9 11 1001\n 10 A 12 1010\n 11 B 13 1011\n', + 'function': '\nFunction definitions\n********************\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n funcdef ::= [decorators] "def" funcname "(" [parameter_list] ")" ["->" expression] ":" suite\n decorators ::= decorator+\n decorator ::= "@" dotted_name ["(" [parameter_list [","]] ")"] NEWLINE\n dotted_name ::= identifier ("." identifier)*\n parameter_list ::= (defparameter ",")*\n ( "*" [parameter] ("," defparameter)* ["," "**" parameter]\n | "**" parameter\n | defparameter [","] )\n parameter ::= identifier [":" expression]\n defparameter ::= parameter ["=" expression]\n funcname ::= identifier\n\nA function definition is an executable statement. Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function). 
This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition. The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object. Multiple decorators are applied in\nnested fashion. For example, the following code\n\n @f1(arg)\n @f2\n def func(): pass\n\nis equivalent to\n\n def func(): pass\n func = f1(arg)(f2(func))\n\nWhen one or more *parameters* have the form *parameter* ``=``\n*expression*, the function is said to have "default parameter values."\nFor a parameter with a default value, the corresponding *argument* may\nbe omitted from a call, in which case the parameter\'s default value is\nsubstituted. If a parameter has a default value, all following\nparameters up until the "``*``" must also have a default value ---\nthis is a syntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated from left to right when the\nfunction definition is executed.** This means that the expression is\nevaluated once, when the function is defined, and that the same "pre-\ncomputed" value is used for each call. This is especially important\nto understand when a default parameter is a mutable object, such as a\nlist or a dictionary: if the function modifies the object (e.g. by\nappending an item to a list), the default value is in effect modified.\nThis is generally not what was intended. A way around this is to use\n``None`` as the default, and explicitly test for it in the body of the\nfunction, e.g.:\n\n def whats_on_the_telly(penguin=None):\n if penguin is None:\n penguin = []\n penguin.append("property of the zoo")\n return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values. If the form\n"``*identifier``" is present, it is initialized to a tuple receiving\nany excess positional parameters, defaulting to the empty tuple. If\nthe form "``**identifier``" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary. Parameters after "``*``" or "``*identifier``" are\nkeyword-only parameters and may only be passed used keyword arguments.\n\nParameters may have annotations of the form "``: expression``"\nfollowing the parameter name. Any parameter may have an annotation\neven those of the form ``*identifier`` or ``**identifier``. Functions\nmay have "return" annotation of the form "``-> expression``" after the\nparameter list. These annotations can be any valid Python expression\nand are evaluated when the function definition is executed.\nAnnotations may be evaluated in a different order than they appear in\nthe source code. The presence of annotations does not change the\nsemantics of a function. 
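For illustration, a hypothetical annotated function (names invented) showing that annotations are recorded but otherwise ignored at call time:

   >>> def scale(v: 'sequence of numbers', factor: float = 1.0) -> list:
   ...     return [x * factor for x in v]
   ...
   >>> scale([1, 2, 3], 2)            # the annotations are not enforced
   [2, 4, 6]
   >>> sorted(scale.__annotations__)  # they are stored by parameter name
   ['factor', 'return', 'v']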
The annotation values are available as\nvalues of a dictionary keyed by the parameters\' names in the\n``__annotations__`` attribute of the function object.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions. This uses lambda forms,\ndescribed in section *Lambdas*. Note that the lambda form is merely a\nshorthand for a simplified function definition; a function defined in\na "``def``" statement can be passed around or assigned to another name\njust like a function defined by a lambda form. The "``def``" form is\nactually more powerful since it allows the execution of multiple\nstatements and annotations.\n\n**Programmer\'s note:** Functions are first-class objects. A "``def``"\nform executed inside a function definition defines a local function\nthat can be returned or passed around. Free variables used in the\nnested function can access the local variables of the function\ncontaining the def. See section *Naming and binding* for details.\n\nSee also:\n\n **PEP 3107** - Function Annotations\n The original specification for function annotations.\n', 'global': '\nThe ``global`` statement\n************************\n\n global_stmt ::= "global" identifier ("," identifier)*\n\nThe ``global`` statement is a declaration which holds for the entire\ncurrent code block. It means that the listed identifiers are to be\ninterpreted as globals. It would be impossible to assign to a global\nvariable without ``global``, although free variables may refer to\nglobals without being declared global.\n\nNames listed in a ``global`` statement must not be used in the same\ncode block textually preceding that ``global`` statement.\n\nNames listed in a ``global`` statement must not be defined as formal\nparameters or in a ``for`` loop control target, ``class`` definition,\nfunction definition, or ``import`` statement.\n\n**CPython implementation detail:** The current implementation does not\nenforce the latter two restrictions, but programs should not abuse\nthis freedom, as future implementations may enforce them or silently\nchange the meaning of the program.\n\n**Programmer\'s note:** the ``global`` is a directive to the parser.\nIt applies only to code parsed at the same time as the ``global``\nstatement. In particular, a ``global`` statement contained in a string\nor code object supplied to the built-in ``exec()`` function does not\naffect the code block *containing* the function call, and code\ncontained in such a string is unaffected by ``global`` statements in\nthe code containing the function call. The same applies to the\n``eval()`` and ``compile()`` functions.\n', 'id-classes': '\nReserved classes of identifiers\n*******************************\n\nCertain classes of identifiers (besides keywords) have special\nmeanings. These classes are identified by the patterns of leading and\ntrailing underscore characters:\n\n``_*``\n Not imported by ``from module import *``. The special identifier\n ``_`` is used in the interactive interpreter to store the result of\n the last evaluation; it is stored in the ``builtins`` module. When\n not in interactive mode, ``_`` has no special meaning and is not\n defined. See section *The import statement*.\n\n Note: The name ``_`` is often used in conjunction with\n internationalization; refer to the documentation for the\n ``gettext`` module for more information on this convention.\n\n``__*__``\n System-defined names. 
These names are defined by the interpreter\n and its implementation (including the standard library). Current\n system names are discussed in the *Special method names* section\n and elsewhere. More will likely be defined in future versions of\n Python. *Any* use of ``__*__`` names, in any context, that does\n not follow explicitly documented use, is subject to breakage\n without warning.\n\n``__*``\n Class-private names. Names in this category, when used within the\n context of a class definition, are re-written to use a mangled form\n to help avoid name clashes between "private" attributes of base and\n derived classes. See section *Identifiers (Names)*.\n', 'identifiers': '\nIdentifiers and keywords\n************************\n\nIdentifiers (also referred to as *names*) are described by the\nfollowing lexical definitions.\n\nThe syntax of identifiers in Python is based on the Unicode standard\nannex UAX-31, with elaboration and changes as defined below; see also\n**PEP 3131** for further details.\n\nWithin the ASCII range (U+0001..U+007F), the valid characters for\nidentifiers are the same as in Python 2.x: the uppercase and lowercase\nletters ``A`` through ``Z``, the underscore ``_`` and, except for the\nfirst character, the digits ``0`` through ``9``.\n\nPython 3.0 introduces additional characters from outside the ASCII\nrange (see **PEP 3131**). For these characters, the classification\nuses the version of the Unicode Character Database as included in the\n``unicodedata`` module.\n\nIdentifiers are unlimited in length. Case is significant.\n\n identifier ::= xid_start xid_continue*\n id_start ::= \n id_continue ::= \n xid_start ::= \n xid_continue ::= \n\nThe Unicode category codes mentioned above stand for:\n\n* *Lu* - uppercase letters\n\n* *Ll* - lowercase letters\n\n* *Lt* - titlecase letters\n\n* *Lm* - modifier letters\n\n* *Lo* - other letters\n\n* *Nl* - letter numbers\n\n* *Mn* - nonspacing marks\n\n* *Mc* - spacing combining marks\n\n* *Nd* - decimal numbers\n\n* *Pc* - connector punctuations\n\n* *Other_ID_Start* - explicit list of characters in PropList.txt to\n support backwards compatibility\n\n* *Other_ID_Continue* - likewise\n\nAll identifiers are converted into the normal form NFKC while parsing;\ncomparison of identifiers is based on NFKC.\n\nA non-normative HTML file listing all valid identifier characters for\nUnicode 4.1 can be found at http://www.dcl.hpi.uni-\npotsdam.de/home/loewis/table-3131.html.\n\n\nKeywords\n========\n\nThe following identifiers are used as reserved words, or *keywords* of\nthe language, and cannot be used as ordinary identifiers. They must\nbe spelled exactly as written here:\n\n False class finally is return\n None continue for lambda try\n True def from nonlocal while\n and del global not with\n as elif if or yield\n assert else import pass\n break except in raise\n\n\nReserved classes of identifiers\n===============================\n\nCertain classes of identifiers (besides keywords) have special\nmeanings. These classes are identified by the patterns of leading and\ntrailing underscore characters:\n\n``_*``\n Not imported by ``from module import *``. The special identifier\n ``_`` is used in the interactive interpreter to store the result of\n the last evaluation; it is stored in the ``builtins`` module. When\n not in interactive mode, ``_`` has no special meaning and is not\n defined. 
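A quick interactive illustration of the special ``_`` binding mentioned above (ordinary interpreter behaviour, not part of the diff):

   >>> 2 ** 10
   1024
   >>> _ * 2        # '_' holds the result of the previous evaluation
   2048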
See section *The import statement*.\n\n Note: The name ``_`` is often used in conjunction with\n internationalization; refer to the documentation for the\n ``gettext`` module for more information on this convention.\n\n``__*__``\n System-defined names. These names are defined by the interpreter\n and its implementation (including the standard library). Current\n system names are discussed in the *Special method names* section\n and elsewhere. More will likely be defined in future versions of\n Python. *Any* use of ``__*__`` names, in any context, that does\n not follow explicitly documented use, is subject to breakage\n without warning.\n\n``__*``\n Class-private names. Names in this category, when used within the\n context of a class definition, are re-written to use a mangled form\n to help avoid name clashes between "private" attributes of base and\n derived classes. See section *Identifiers (Names)*.\n', 'if': '\nThe ``if`` statement\n********************\n\nThe ``if`` statement is used for conditional execution:\n\n if_stmt ::= "if" expression ":" suite\n ( "elif" expression ":" suite )*\n ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the ``if`` statement is executed or evaluated).\nIf all expressions are false, the suite of the ``else`` clause, if\npresent, is executed.\n', 'imaginary': '\nImaginary literals\n******************\n\nImaginary literals are described by the following lexical definitions:\n\n imagnumber ::= (floatnumber | intpart) ("j" | "J")\n\nAn imaginary literal yields a complex number with a real part of 0.0.\nComplex numbers are represented as a pair of floating point numbers\nand have the same restrictions on their range. To create a complex\nnumber with a nonzero real part, add a floating point number to it,\ne.g., ``(3+4j)``. Some examples of imaginary literals:\n\n 3.14j 10.j 10j .001j 1e100j 3.14e-10j\n', - 'import': '\nThe ``import`` statement\n************************\n\n import_stmt ::= "import" module ["as" name] ( "," module ["as" name] )*\n | "from" relative_module "import" identifier ["as" name]\n ( "," identifier ["as" name] )*\n | "from" relative_module "import" "(" identifier ["as" name]\n ( "," identifier ["as" name] )* [","] ")"\n | "from" module "import" "*"\n module ::= (identifier ".")* identifier\n relative_module ::= "."* module | "."+\n name ::= identifier\n\nThe basic import statement (no ``from`` clause) is executed in two\nsteps:\n\n1. find a module, loading and initializing it if necessary\n\n2. define a name or names in the local namespace for the scope where\n the ``import`` statement occurs.\n\nWhen the statement contains multiple clauses (separated by commas) the\ntwo steps are carried out separately for each clause, just as though\nthe clauses had been separated out into individiual import statements.\n\nThe details of the first step, finding and loading modules is\ndescribed in greater detail in the section on the *import system*,\nwhich also describes the various types of packages and modules that\ncan be imported, as well as all the hooks that can be used to\ncustomize the import system. 
Note that failures in this step may\nindicate either that the module could not be located, *or* that an\nerror occurred while initializing the module, which includes execution\nof the module\'s code.\n\nIf the requested module is retrieved successfully, it will be made\navailable in the local namespace in one of three ways:\n\n* If the module name is followed by ``as``, then the name following\n ``as`` is bound directly to the imported module.\n\n* If no other name is specified, and the module being imported is a\n top level module, the module\'s name is bound in the local namespace\n as a reference to the imported module\n\n* If the module being imported is *not* a top level module, then the\n name of the top level package that contains the module is bound in\n the local namespace as a reference to the top level package. The\n imported module must be accessed using its full qualified name\n rather than directly\n\nThe ``from`` form uses a slightly more complex process:\n\n1. find the module specified in the ``from`` clause loading and\n initializing it if necessary;\n\n2. for each of the identifiers specified in the ``import`` clauses:\n\n 1. check if the imported module has an attribute by that name\n\n 2. if not, attempt to import a submodule with that name and then\n check the imported module again for that attribute\n\n 3. if the attribute is not found, ``ImportError`` is raised.\n\n 4. otherwise, a reference to that value is bound in the local\n namespace, using the name in the ``as`` clause if it is present,\n otherwise using the attribute name\n\nExamples:\n\n import foo # foo imported and bound locally\n import foo.bar.baz # foo.bar.baz imported, foo bound locally\n import foo.bar.baz as fbb # foo.bar.baz imported and bound as fbb\n from foo.bar import baz # foo.bar.baz imported and bound as baz\n from foo import attr # foo imported and foo.attr bound as attr\n\nIf the list of identifiers is replaced by a star (``\'*\'``), all public\nnames defined in the module are bound in the local namespace for the\nscope where the ``import`` statement occurs.\n\nThe *public names* defined by a module are determined by checking the\nmodule\'s namespace for a variable named ``__all__``; if defined, it\nmust be a sequence of strings which are names defined or imported by\nthat module. The names given in ``__all__`` are all considered public\nand are required to exist. If ``__all__`` is not defined, the set of\npublic names includes all names found in the module\'s namespace which\ndo not begin with an underscore character (``\'_\'``). ``__all__``\nshould contain the entire public API. It is intended to avoid\naccidentally exporting items that are not part of the API (such as\nlibrary modules which were imported and used within the module).\n\nThe ``from`` form with ``*`` may only occur in a module scope.\nAttempting to use it in class or function definitions will raise a\n``SyntaxError``.\n\nThe *public names* defined by a module are determined by checking the\nmodule\'s namespace for a variable named ``__all__``; if defined, it\nmust be a sequence of strings which are names defined or imported by\nthat module. The names given in ``__all__`` are all considered public\nand are required to exist. If ``__all__`` is not defined, the set of\npublic names includes all names found in the module\'s namespace which\ndo not begin with an underscore character (``\'_\'``). ``__all__``\nshould contain the entire public API. 
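For example, a module laid out along these lines (all names here are hypothetical) exports only the name listed in ``__all__`` under ``from ... import *``:

    # sound/effects/echo.py -- hypothetical module
    import os                         # used internally, not part of the API

    __all__ = ['apply_echo']          # the only name bound by "from ... import *"

    def apply_echo(data, delay=0.1):
        ...

    def _mix(a, b):                   # leading underscore: private by convention
        ...
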
It is intended to avoid\naccidentally exporting items that are not part of the API (such as\nlibrary modules which were imported and used within the module).\n\nThe ``from`` form with ``*`` may only occur in a module scope. The\nwild card form of import --- ``import *`` --- is only allowed at the\nmodule level. Attempting to use it in class or function definitions\nwill raise a ``SyntaxError``.\n\nWhen specifying what module to import you do not have to specify the\nabsolute name of the module. When a module or package is contained\nwithin another package it is possible to make a relative import within\nthe same top package without having to mention the package name. By\nusing leading dots in the specified module or package after ``from``\nyou can specify how high to traverse up the current package hierarchy\nwithout specifying exact names. One leading dot means the current\npackage where the module making the import exists. Two dots means up\none package level. Three dots is up two levels, etc. So if you execute\n``from . import mod`` from a module in the ``pkg`` package then you\nwill end up importing ``pkg.mod``. If you execute ``from ..subpkg2\nimport mod`` from within ``pkg.subpkg1`` you will import\n``pkg.subpkg2.mod``. The specification for relative imports is\ncontained within **PEP 328**.\n\n``importlib.import_module()`` is provided to support applications that\ndetermine which modules need to be loaded dynamically.\n\n\nFuture statements\n=================\n\nA *future statement* is a directive to the compiler that a particular\nmodule should be compiled using syntax or semantics that will be\navailable in a specified future release of Python. The future\nstatement is intended to ease migration to future versions of Python\nthat introduce incompatible changes to the language. It allows use of\nthe new features on a per-module basis before the release in which the\nfeature becomes standard.\n\n future_statement ::= "from" "__future__" "import" feature ["as" name]\n ("," feature ["as" name])*\n | "from" "__future__" "import" "(" feature ["as" name]\n ("," feature ["as" name])* [","] ")"\n feature ::= identifier\n name ::= identifier\n\nA future statement must appear near the top of the module. The only\nlines that can appear before a future statement are:\n\n* the module docstring (if any),\n\n* comments,\n\n* blank lines, and\n\n* other future statements.\n\nThe features recognized by Python 3.0 are ``absolute_import``,\n``division``, ``generators``, ``unicode_literals``,\n``print_function``, ``nested_scopes`` and ``with_statement``. They\nare all redundant because they are always enabled, and only kept for\nbackwards compatibility.\n\nA future statement is recognized and treated specially at compile\ntime: Changes to the semantics of core constructs are often\nimplemented by generating different code. It may even be the case\nthat a new feature introduces new incompatible syntax (such as a new\nreserved word), in which case the compiler may need to parse the\nmodule differently. 
Such decisions cannot be pushed off until\nruntime.\n\nFor any given release, the compiler knows which feature names have\nbeen defined, and raises a compile-time error if a future statement\ncontains a feature not known to it.\n\nThe direct runtime semantics are the same as for any import statement:\nthere is a standard module ``__future__``, described later, and it\nwill be imported in the usual way at the time the future statement is\nexecuted.\n\nThe interesting runtime semantics depend on the specific feature\nenabled by the future statement.\n\nNote that there is nothing special about the statement:\n\n import __future__ [as name]\n\nThat is not a future statement; it\'s an ordinary import statement with\nno special semantics or syntax restrictions.\n\nCode compiled by calls to the built-in functions ``exec()`` and\n``compile()`` that occur in a module ``M`` containing a future\nstatement will, by default, use the new syntax or semantics associated\nwith the future statement. This can be controlled by optional\narguments to ``compile()`` --- see the documentation of that function\nfor details.\n\nA future statement typed at an interactive interpreter prompt will\ntake effect for the rest of the interpreter session. If an\ninterpreter is started with the *-i* option, is passed a script name\nto execute, and the script includes a future statement, it will be in\neffect in the interactive session started after the script is\nexecuted.\n\nSee also:\n\n **PEP 236** - Back to the __future__\n The original proposal for the __future__ mechanism.\n', + 'import': '\nThe ``import`` statement\n************************\n\n import_stmt ::= "import" module ["as" name] ( "," module ["as" name] )*\n | "from" relative_module "import" identifier ["as" name]\n ( "," identifier ["as" name] )*\n | "from" relative_module "import" "(" identifier ["as" name]\n ( "," identifier ["as" name] )* [","] ")"\n | "from" module "import" "*"\n module ::= (identifier ".")* identifier\n relative_module ::= "."* module | "."+\n name ::= identifier\n\nThe basic import statement (no ``from`` clause) is executed in two\nsteps:\n\n1. find a module, loading and initializing it if necessary\n\n2. define a name or names in the local namespace for the scope where\n the ``import`` statement occurs.\n\nWhen the statement contains multiple clauses (separated by commas) the\ntwo steps are carried out separately for each clause, just as though\nthe clauses had been separated out into individiual import statements.\n\nThe details of the first step, finding and loading modules is\ndescribed in greater detail in the section on the *import system*,\nwhich also describes the various types of packages and modules that\ncan be imported, as well as all the hooks that can be used to\ncustomize the import system. 
Note that failures in this step may\nindicate either that the module could not be located, *or* that an\nerror occurred while initializing the module, which includes execution\nof the module\'s code.\n\nIf the requested module is retrieved successfully, it will be made\navailable in the local namespace in one of three ways:\n\n* If the module name is followed by ``as``, then the name following\n ``as`` is bound directly to the imported module.\n\n* If no other name is specified, and the module being imported is a\n top level module, the module\'s name is bound in the local namespace\n as a reference to the imported module\n\n* If the module being imported is *not* a top level module, then the\n name of the top level package that contains the module is bound in\n the local namespace as a reference to the top level package. The\n imported module must be accessed using its full qualified name\n rather than directly\n\nThe ``from`` form uses a slightly more complex process:\n\n1. find the module specified in the ``from`` clause loading and\n initializing it if necessary;\n\n2. for each of the identifiers specified in the ``import`` clauses:\n\n 1. check if the imported module has an attribute by that name\n\n 2. if not, attempt to import a submodule with that name and then\n check the imported module again for that attribute\n\n 3. if the attribute is not found, ``ImportError`` is raised.\n\n 4. otherwise, a reference to that value is bound in the local\n namespace, using the name in the ``as`` clause if it is present,\n otherwise using the attribute name\n\nExamples:\n\n import foo # foo imported and bound locally\n import foo.bar.baz # foo.bar.baz imported, foo bound locally\n import foo.bar.baz as fbb # foo.bar.baz imported and bound as fbb\n from foo.bar import baz # foo.bar.baz imported and bound as baz\n from foo import attr # foo imported and foo.attr bound as attr\n\nIf the list of identifiers is replaced by a star (``\'*\'``), all public\nnames defined in the module are bound in the local namespace for the\nscope where the ``import`` statement occurs.\n\nThe *public names* defined by a module are determined by checking the\nmodule\'s namespace for a variable named ``__all__``; if defined, it\nmust be a sequence of strings which are names defined or imported by\nthat module. The names given in ``__all__`` are all considered public\nand are required to exist. If ``__all__`` is not defined, the set of\npublic names includes all names found in the module\'s namespace which\ndo not begin with an underscore character (``\'_\'``). ``__all__``\nshould contain the entire public API. It is intended to avoid\naccidentally exporting items that are not part of the API (such as\nlibrary modules which were imported and used within the module).\n\nThe ``from`` form with ``*`` may only occur in a module scope. The\nwild card form of import --- ``import *`` --- is only allowed at the\nmodule level. Attempting to use it in class or function definitions\nwill raise a ``SyntaxError``.\n\nWhen specifying what module to import you do not have to specify the\nabsolute name of the module. When a module or package is contained\nwithin another package it is possible to make a relative import within\nthe same top package without having to mention the package name. By\nusing leading dots in the specified module or package after ``from``\nyou can specify how high to traverse up the current package hierarchy\nwithout specifying exact names. 
One leading dot means the current\npackage where the module making the import exists. Two dots means up\none package level. Three dots is up two levels, etc. So if you execute\n``from . import mod`` from a module in the ``pkg`` package then you\nwill end up importing ``pkg.mod``. If you execute ``from ..subpkg2\nimport mod`` from within ``pkg.subpkg1`` you will import\n``pkg.subpkg2.mod``. The specification for relative imports is\ncontained within **PEP 328**.\n\n``importlib.import_module()`` is provided to support applications that\ndetermine which modules need to be loaded dynamically.\n\n\nFuture statements\n=================\n\nA *future statement* is a directive to the compiler that a particular\nmodule should be compiled using syntax or semantics that will be\navailable in a specified future release of Python. The future\nstatement is intended to ease migration to future versions of Python\nthat introduce incompatible changes to the language. It allows use of\nthe new features on a per-module basis before the release in which the\nfeature becomes standard.\n\n future_statement ::= "from" "__future__" "import" feature ["as" name]\n ("," feature ["as" name])*\n | "from" "__future__" "import" "(" feature ["as" name]\n ("," feature ["as" name])* [","] ")"\n feature ::= identifier\n name ::= identifier\n\nA future statement must appear near the top of the module. The only\nlines that can appear before a future statement are:\n\n* the module docstring (if any),\n\n* comments,\n\n* blank lines, and\n\n* other future statements.\n\nThe features recognized by Python 3.0 are ``absolute_import``,\n``division``, ``generators``, ``unicode_literals``,\n``print_function``, ``nested_scopes`` and ``with_statement``. They\nare all redundant because they are always enabled, and only kept for\nbackwards compatibility.\n\nA future statement is recognized and treated specially at compile\ntime: Changes to the semantics of core constructs are often\nimplemented by generating different code. It may even be the case\nthat a new feature introduces new incompatible syntax (such as a new\nreserved word), in which case the compiler may need to parse the\nmodule differently. Such decisions cannot be pushed off until\nruntime.\n\nFor any given release, the compiler knows which feature names have\nbeen defined, and raises a compile-time error if a future statement\ncontains a feature not known to it.\n\nThe direct runtime semantics are the same as for any import statement:\nthere is a standard module ``__future__``, described later, and it\nwill be imported in the usual way at the time the future statement is\nexecuted.\n\nThe interesting runtime semantics depend on the specific feature\nenabled by the future statement.\n\nNote that there is nothing special about the statement:\n\n import __future__ [as name]\n\nThat is not a future statement; it\'s an ordinary import statement with\nno special semantics or syntax restrictions.\n\nCode compiled by calls to the built-in functions ``exec()`` and\n``compile()`` that occur in a module ``M`` containing a future\nstatement will, by default, use the new syntax or semantics associated\nwith the future statement. This can be controlled by optional\narguments to ``compile()`` --- see the documentation of that function\nfor details.\n\nA future statement typed at an interactive interpreter prompt will\ntake effect for the rest of the interpreter session. 
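A minimal sketch of the placement rule for future statements (the module is hypothetical; in Python 3 the listed features are already enabled, so the statement is redundant but still legal):

    """Hypothetical module -- only the docstring, comments and blank lines
    may appear above the future statement."""
    from __future__ import division, print_function   # redundant but legal in 3.x

    print(1 / 2)       # 0.5; true division is always enabled in Python 3
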
If an\ninterpreter is started with the *-i* option, is passed a script name\nto execute, and the script includes a future statement, it will be in\neffect in the interactive session started after the script is\nexecuted.\n\nSee also:\n\n **PEP 236** - Back to the __future__\n The original proposal for the __future__ mechanism.\n', 'in': '\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation. Also unlike C, expressions like ``a < b < c`` have the\ninterpretation that is conventional in mathematics:\n\n comparison ::= or_expr ( comp_operator or_expr )*\n comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: ``True`` or ``False``.\n\nComparisons can be chained arbitrarily, e.g., ``x < y <= z`` is\nequivalent to ``x < y and y <= z``, except that ``y`` is evaluated\nonly once (but in both cases ``z`` is not evaluated at all when ``x <\ny`` is found to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then ``a op1 b op2 c ... y\nopN z`` is equivalent to ``a op1 b and b op2 c and ... y opN z``,\nexcept that each expression is evaluated at most once.\n\nNote that ``a op1 b op2 c`` doesn\'t imply any kind of comparison\nbetween *a* and *c*, so that, e.g., ``x < y > z`` is perfectly legal\n(though perhaps not pretty).\n\nThe operators ``<``, ``>``, ``==``, ``>=``, ``<=``, and ``!=`` compare\nthe values of two objects. The objects need not have the same type.\nIf both are numbers, they are converted to a common type. Otherwise,\nthe ``==`` and ``!=`` operators *always* consider objects of different\ntypes to be unequal, while the ``<``, ``>``, ``>=`` and ``<=``\noperators raise a ``TypeError`` when comparing objects of different\ntypes that do not implement these operators for the given pair of\ntypes. You can control comparison behavior of objects of non-built-in\ntypes by defining rich comparison methods like ``__gt__()``, described\nin section *Basic customization*.\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* The values ``float(\'NaN\')`` and ``Decimal(\'NaN\')`` are special. The\n are identical to themselves, ``x is x`` but are not equal to\n themselves, ``x != x``. Additionally, comparing any value to a\n not-a-number value will return ``False``. For example, both ``3 <\n float(\'NaN\')`` and ``float(\'NaN\') < 3`` will return ``False``.\n\n* Bytes objects are compared lexicographically using the numeric\n values of their elements.\n\n* Strings are compared lexicographically using the numeric equivalents\n (the result of the built-in function ``ord()``) of their characters.\n [3] String and bytes object can\'t be compared!\n\n* Tuples and lists are compared lexicographically using comparison of\n corresponding elements. This means that to compare equal, each\n element must compare equal and the two sequences must be of the same\n type and have the same length.\n\n If not equal, the sequences are ordered the same as their first\n differing elements. For example, ``[1,2,x] <= [1,2,y]`` has the\n same value as ``x <= y``. If the corresponding element does not\n exist, the shorter sequence is ordered first (for example, ``[1,2] <\n [1,2,3]``).\n\n* Mappings (dictionaries) compare equal if and only if they have the\n same ``(key, value)`` pairs. 
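A short session pulling together the chaining, not-a-number and sequence rules above:

    >>> x = 2
    >>> 1 < x <= 3              # same as (1 < x) and (x <= 3), x evaluated once
    True
    >>> nan = float('NaN')
    >>> nan == nan, 3 < nan     # NaN never compares equal or ordered
    (False, False)
    >>> [1, 2] < [1, 2, 3]      # the shorter sequence orders first
    True
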
Order comparisons ``(\'<\', \'<=\', \'>=\',\n \'>\')`` raise ``TypeError``.\n\n* Sets and frozensets define comparison operators to mean subset and\n superset tests. Those relations do not define total orderings (the\n two sets ``{1,2}`` and {2,3} are not equal, nor subsets of one\n another, nor supersets of one another). Accordingly, sets are not\n appropriate arguments for functions which depend on total ordering.\n For example, ``min()``, ``max()``, and ``sorted()`` produce\n undefined results given a list of sets as inputs.\n\n* Most other objects of built-in types compare unequal unless they are\n the same object; the choice whether one object is considered smaller\n or larger than another one is made arbitrarily but consistently\n within one execution of a program.\n\nComparison of objects of the differing types depends on whether either\nof the types provide explicit support for the comparison. Most\nnumeric types can be compared with one another. When cross-type\ncomparison is not supported, the comparison method returns\n``NotImplemented``.\n\nThe operators ``in`` and ``not in`` test for membership. ``x in s``\nevaluates to true if *x* is a member of *s*, and false otherwise. ``x\nnot in s`` returns the negation of ``x in s``. All built-in sequences\nand set types support this as well as dictionary, for which ``in``\ntests whether a the dictionary has a given key. For container types\nsuch as list, tuple, set, frozenset, dict, or collections.deque, the\nexpression ``x in y`` is equivalent to ``any(x is e or x == e for e in\ny)``.\n\nFor the string and bytes types, ``x in y`` is true if and only if *x*\nis a substring of *y*. An equivalent test is ``y.find(x) != -1``.\nEmpty strings are always considered to be a substring of any other\nstring, so ``"" in "abc"`` will return ``True``.\n\nFor user-defined classes which define the ``__contains__()`` method,\n``x in y`` is true if and only if ``y.__contains__(x)`` is true.\n\nFor user-defined classes which do not define ``__contains__()`` but do\ndefine ``__iter__()``, ``x in y`` is true if some value ``z`` with ``x\n== z`` is produced while iterating over ``y``. If an exception is\nraised during the iteration, it is as if ``in`` raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n``__getitem__()``, ``x in y`` is true if and only if there is a non-\nnegative integer index *i* such that ``x == y[i]``, and all lower\ninteger indices do not raise ``IndexError`` exception. (If any other\nexception is raised, it is as if ``in`` raised that exception).\n\nThe operator ``not in`` is defined to have the inverse true value of\n``in``.\n\nThe operators ``is`` and ``is not`` test for object identity: ``x is\ny`` is true if and only if *x* and *y* are the same object. ``x is\nnot y`` yields the inverse truth value. 
[4]\n', 'integers': '\nInteger literals\n****************\n\nInteger literals are described by the following lexical definitions:\n\n integer ::= decimalinteger | octinteger | hexinteger | bininteger\n decimalinteger ::= nonzerodigit digit* | "0"+\n nonzerodigit ::= "1"..."9"\n digit ::= "0"..."9"\n octinteger ::= "0" ("o" | "O") octdigit+\n hexinteger ::= "0" ("x" | "X") hexdigit+\n bininteger ::= "0" ("b" | "B") bindigit+\n octdigit ::= "0"..."7"\n hexdigit ::= digit | "a"..."f" | "A"..."F"\n bindigit ::= "0" | "1"\n\nThere is no limit for the length of integer literals apart from what\ncan be stored in available memory.\n\nNote that leading zeros in a non-zero decimal number are not allowed.\nThis is for disambiguation with C-style octal literals, which Python\nused before version 3.0.\n\nSome examples of integer literals:\n\n 7 2147483647 0o177 0b100110111\n 3 79228162514264337593543950336 0o377 0x100000000\n 79228162514264337593543950336 0xdeadbeef\n', 'lambda': '\nLambdas\n*******\n\n lambda_form ::= "lambda" [parameter_list]: expression\n lambda_form_nocond ::= "lambda" [parameter_list]: expression_nocond\n\nLambda forms (lambda expressions) have the same syntactic position as\nexpressions. They are a shorthand to create anonymous functions; the\nexpression ``lambda arguments: expression`` yields a function object.\nThe unnamed object behaves like a function object defined with\n\n def (arguments):\n return expression\n\nSee section *Function definitions* for the syntax of parameter lists.\nNote that functions created with lambda forms cannot contain\nstatements or annotations.\n', @@ -51,17 +51,17 @@ 'numbers': "\nNumeric literals\n****************\n\nThere are three types of numeric literals: integers, floating point\nnumbers, and imaginary numbers. There are no complex literals\n(complex numbers can be formed by adding a real number and an\nimaginary number).\n\nNote that numeric literals do not include a sign; a phrase like ``-1``\nis actually an expression composed of the unary operator '``-``' and\nthe literal ``1``.\n", 'numeric-types': "\nEmulating numeric types\n***********************\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``). For instance, to evaluate the expression ``x + y``, where\n *x* is an instance of a class that has an ``__add__()`` method,\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\n should not be related to ``__truediv__()``. 
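As a minimal sketch of this protocol (the class is hypothetical; it also uses the reflected ``__radd__()`` hook and the ``NotImplemented`` convention documented just below):

    class Meters:
        def __init__(self, value):
            self.value = value
        def __repr__(self):
            return 'Meters(%r)' % self.value
        def __add__(self, other):
            if isinstance(other, Meters):
                return Meters(self.value + other.value)
            return NotImplemented            # let Python try other.__radd__()
        def __radd__(self, other):
            if other == 0:                   # so sum(), which starts at 0, works
                return self
            return NotImplemented

    print(Meters(2) + Meters(3))             # Meters(5)
    print(sum([Meters(1), Meters(2)]))       # Meters(3)
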
Note that\n ``__pow__()`` should be defined to accept an optional third\n argument if the ternary version of the built-in ``pow()`` function\n is to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return ``NotImplemented``.\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``) with reflected (swapped) operands. These functions are only\n called if the left operand does not support the corresponding\n operation and the operands are of different types. [2] For\n instance, to evaluate the expression ``x - y``, where *y* is an\n instance of a class that has an ``__rsub__()`` method,\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\n *NotImplemented*.\n\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\n (the coercion rules would become too complicated).\n\n Note: If the right operand's type is a subclass of the left operand's\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand's\n non-reflected method. This behavior allows subclasses to\n override their ancestors' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\n should attempt to do the operation in-place (modifying *self*) and\n return the result (which could be, but does not have to be,\n *self*). If a specific method is not defined, the augmented\n assignment falls back to the normal methods. For instance, to\n execute the statement ``x += y``, where *x* is an instance of a\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\n called. If *x* is an instance of a class that does not define a\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\n considered, as with the evaluation of ``x + y``.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations (``-``, ``+``,\n ``abs()`` and ``~``).\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions ``complex()``,\n ``int()``, ``float()`` and ``round()``. Should return a value of\n the appropriate type.\n\nobject.__index__(self)\n\n Called to implement ``operator.index()``. Also called whenever\n Python needs an integer object (such as in slicing, or in the\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). 
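For instance, a hypothetical class defining only ``__index__()`` can be used anywhere an integer is expected:

    class BitField:
        def __init__(self, n):
            self.n = n
        def __index__(self):                 # used wherever an int is required
            return self.n

    print(bin(BitField(5)))                  # 0b101
    print(['a', 'b', 'c'][BitField(1)])      # b
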
Must return\n an integer.\n", 'objects': '\nObjects, values and types\n*************************\n\n*Objects* are Python\'s abstraction for data. All data in a Python\nprogram is represented by objects or by relations between objects. (In\na sense, and in conformance to Von Neumann\'s model of a "stored\nprogram computer," code is also represented by objects.)\n\nEvery object has an identity, a type and a value. An object\'s\n*identity* never changes once it has been created; you may think of it\nas the object\'s address in memory. The \'``is``\' operator compares the\nidentity of two objects; the ``id()`` function returns an integer\nrepresenting its identity.\n\n**CPython implementation detail:** For CPython, ``id(x)`` is the\nmemory address where ``x`` is stored.\n\nAn object\'s type determines the operations that the object supports\n(e.g., "does it have a length?") and also defines the possible values\nfor objects of that type. The ``type()`` function returns an object\'s\ntype (which is an object itself). Like its identity, an object\'s\n*type* is also unchangeable. [1]\n\nThe *value* of some objects can change. Objects whose value can\nchange are said to be *mutable*; objects whose value is unchangeable\nonce they are created are called *immutable*. (The value of an\nimmutable container object that contains a reference to a mutable\nobject can change when the latter\'s value is changed; however the\ncontainer is still considered immutable, because the collection of\nobjects it contains cannot be changed. So, immutability is not\nstrictly the same as having an unchangeable value, it is more subtle.)\nAn object\'s mutability is determined by its type; for instance,\nnumbers, strings and tuples are immutable, while dictionaries and\nlists are mutable.\n\nObjects are never explicitly destroyed; however, when they become\nunreachable they may be garbage-collected. An implementation is\nallowed to postpone garbage collection or omit it altogether --- it is\na matter of implementation quality how garbage collection is\nimplemented, as long as no objects are collected that are still\nreachable.\n\n**CPython implementation detail:** CPython currently uses a reference-\ncounting scheme with (optional) delayed detection of cyclically linked\ngarbage, which collects most objects as soon as they become\nunreachable, but is not guaranteed to collect garbage containing\ncircular references. See the documentation of the ``gc`` module for\ninformation on controlling the collection of cyclic garbage. Other\nimplementations act differently and CPython may change. Do not depend\non immediate finalization of objects when they become unreachable (ex:\nalways close files).\n\nNote that the use of the implementation\'s tracing or debugging\nfacilities may keep objects alive that would normally be collectable.\nAlso note that catching an exception with a \'``try``...``except``\'\nstatement may keep objects alive.\n\nSome objects contain references to "external" resources such as open\nfiles or windows. It is understood that these resources are freed\nwhen the object is garbage-collected, but since garbage collection is\nnot guaranteed to happen, such objects also provide an explicit way to\nrelease the external resource, usually a ``close()`` method. Programs\nare strongly recommended to explicitly close such objects. 
The\n\'``try``...``finally``\' statement and the \'``with``\' statement provide\nconvenient ways to do this.\n\nSome objects contain references to other objects; these are called\n*containers*. Examples of containers are tuples, lists and\ndictionaries. The references are part of a container\'s value. In\nmost cases, when we talk about the value of a container, we imply the\nvalues, not the identities of the contained objects; however, when we\ntalk about the mutability of a container, only the identities of the\nimmediately contained objects are implied. So, if an immutable\ncontainer (like a tuple) contains a reference to a mutable object, its\nvalue changes if that mutable object is changed.\n\nTypes affect almost all aspects of object behavior. Even the\nimportance of object identity is affected in some sense: for immutable\ntypes, operations that compute new values may actually return a\nreference to any existing object with the same type and value, while\nfor mutable objects this is not allowed. E.g., after ``a = 1; b =\n1``, ``a`` and ``b`` may or may not refer to the same object with the\nvalue one, depending on the implementation, but after ``c = []; d =\n[]``, ``c`` and ``d`` are guaranteed to refer to two different,\nunique, newly created empty lists. (Note that ``c = d = []`` assigns\nthe same object to both ``c`` and ``d``.)\n', - 'operator-summary': '\nOperator precedence\n*******************\n\nThe following table summarizes the operator precedences in Python,\nfrom lowest precedence (least binding) to highest precedence (most\nbinding). Operators in the same box have the same precedence. Unless\nthe syntax is explicitly given, operators are binary. Operators in\nthe same box group left to right (except for comparisons, including\ntests, which all have the same precedence and chain from left to right\n--- see section *Comparisons* --- and exponentiation, which groups\nfrom right to left).\n\n+-------------------------------------------------+---------------------------------------+\n| Operator | Description |\n+=================================================+=======================================+\n| ``lambda`` | Lambda expression |\n+-------------------------------------------------+---------------------------------------+\n| ``if`` -- ``else`` | Conditional expression |\n+-------------------------------------------------+---------------------------------------+\n| ``or`` | Boolean OR |\n+-------------------------------------------------+---------------------------------------+\n| ``and`` | Boolean AND |\n+-------------------------------------------------+---------------------------------------+\n| ``not`` ``x`` | Boolean NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``in``, ``not in``, ``is``, ``is not``, ``<``, | Comparisons, including membership |\n| ``<=``, ``>``, ``>=``, ``!=``, ``==`` | tests and identity tests, |\n+-------------------------------------------------+---------------------------------------+\n| ``|`` | Bitwise OR |\n+-------------------------------------------------+---------------------------------------+\n| ``^`` | Bitwise XOR |\n+-------------------------------------------------+---------------------------------------+\n| ``&`` | Bitwise AND |\n+-------------------------------------------------+---------------------------------------+\n| ``<<``, ``>>`` | Shifts |\n+-------------------------------------------------+---------------------------------------+\n| ``+``, ``-`` | Addition and 
subtraction |\n+-------------------------------------------------+---------------------------------------+\n| ``*``, ``/``, ``//``, ``%`` | Multiplication, division, remainder |\n| | [5] |\n+-------------------------------------------------+---------------------------------------+\n| ``+x``, ``-x``, ``~x`` | Positive, negative, bitwise NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``**`` | Exponentiation [6] |\n+-------------------------------------------------+---------------------------------------+\n| ``x[index]``, ``x[index:index]``, | Subscription, slicing, call, |\n| ``x(arguments...)``, ``x.attribute`` | attribute reference |\n+-------------------------------------------------+---------------------------------------+\n| ``(expressions...)``, ``[expressions...]``, | Binding or tuple display, list |\n| ``{key: value...}``, ``{expressions...}`` | display, dictionary display, set |\n| | display |\n+-------------------------------------------------+---------------------------------------+\n\n-[ Footnotes ]-\n\n[1] While ``abs(x%y) < abs(y)`` is true mathematically, for floats it\n may not be true numerically due to roundoff. For example, and\n assuming a platform on which a Python float is an IEEE 754 double-\n precision number, in order that ``-1e-100 % 1e100`` have the same\n sign as ``1e100``, the computed result is ``-1e-100 + 1e100``,\n which is numerically exactly equal to ``1e100``. The function\n ``math.fmod()`` returns a result whose sign matches the sign of\n the first argument instead, and so returns ``-1e-100`` in this\n case. Which approach is more appropriate depends on the\n application.\n\n[2] If x is very close to an exact integer multiple of y, it\'s\n possible for ``x//y`` to be one larger than ``(x-x%y)//y`` due to\n rounding. In such cases, Python returns the latter result, in\n order to preserve that ``divmod(x,y)[0] * y + x % y`` be very\n close to ``x``.\n\n[3] While comparisons between strings make sense at the byte level,\n they may be counter-intuitive to users. For example, the strings\n ``"\\u00C7"`` and ``"\\u0327\\u0043"`` compare differently, even\n though they both represent the same unicode character (LATIN\n CAPITAL LETTER C WITH CEDILLA). To compare strings in a human\n recognizable way, compare using ``unicodedata.normalize()``.\n\n[4] Due to automatic garbage-collection, free lists, and the dynamic\n nature of descriptors, you may notice seemingly unusual behaviour\n in certain uses of the ``is`` operator, like those involving\n comparisons between instance methods, or constants. Check their\n documentation for more info.\n\n[5] The ``%`` operator is also used for string formatting; the same\n precedence applies.\n\n[6] The power operator ``**`` binds less tightly than an arithmetic or\n bitwise unary operator on its right, that is, ``2**-1`` is\n ``0.5``.\n', + 'operator-summary': '\nOperator precedence\n*******************\n\nThe following table summarizes the operator precedences in Python,\nfrom lowest precedence (least binding) to highest precedence (most\nbinding). Operators in the same box have the same precedence. Unless\nthe syntax is explicitly given, operators are binary. 
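A few concrete consequences of the precedence rules tabulated below:

    >>> not 1 + 1 == 3          # '+' and '==' bind more tightly than 'not'
    True
    >>> 2 ** -1                 # unary minus on the right binds before **
    0.5
    >>> -1 ** 2                 # ...but ** binds before unary minus on its left
    -1
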
Operators in\nthe same box group left to right (except for comparisons, including\ntests, which all have the same precedence and chain from left to right\n--- see section *Comparisons* --- and exponentiation, which groups\nfrom right to left).\n\n+-------------------------------------------------+---------------------------------------+\n| Operator | Description |\n+=================================================+=======================================+\n| ``lambda`` | Lambda expression |\n+-------------------------------------------------+---------------------------------------+\n| ``if`` -- ``else`` | Conditional expression |\n+-------------------------------------------------+---------------------------------------+\n| ``or`` | Boolean OR |\n+-------------------------------------------------+---------------------------------------+\n| ``and`` | Boolean AND |\n+-------------------------------------------------+---------------------------------------+\n| ``not`` ``x`` | Boolean NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``in``, ``not in``, ``is``, ``is not``, ``<``, | Comparisons, including membership |\n| ``<=``, ``>``, ``>=``, ``!=``, ``==`` | tests and identity tests |\n+-------------------------------------------------+---------------------------------------+\n| ``|`` | Bitwise OR |\n+-------------------------------------------------+---------------------------------------+\n| ``^`` | Bitwise XOR |\n+-------------------------------------------------+---------------------------------------+\n| ``&`` | Bitwise AND |\n+-------------------------------------------------+---------------------------------------+\n| ``<<``, ``>>`` | Shifts |\n+-------------------------------------------------+---------------------------------------+\n| ``+``, ``-`` | Addition and subtraction |\n+-------------------------------------------------+---------------------------------------+\n| ``*``, ``/``, ``//``, ``%`` | Multiplication, division, remainder |\n| | [5] |\n+-------------------------------------------------+---------------------------------------+\n| ``+x``, ``-x``, ``~x`` | Positive, negative, bitwise NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``**`` | Exponentiation [6] |\n+-------------------------------------------------+---------------------------------------+\n| ``x[index]``, ``x[index:index]``, | Subscription, slicing, call, |\n| ``x(arguments...)``, ``x.attribute`` | attribute reference |\n+-------------------------------------------------+---------------------------------------+\n| ``(expressions...)``, ``[expressions...]``, | Binding or tuple display, list |\n| ``{key: value...}``, ``{expressions...}`` | display, dictionary display, set |\n| | display |\n+-------------------------------------------------+---------------------------------------+\n\n-[ Footnotes ]-\n\n[1] While ``abs(x%y) < abs(y)`` is true mathematically, for floats it\n may not be true numerically due to roundoff. For example, and\n assuming a platform on which a Python float is an IEEE 754 double-\n precision number, in order that ``-1e-100 % 1e100`` have the same\n sign as ``1e100``, the computed result is ``-1e-100 + 1e100``,\n which is numerically exactly equal to ``1e100``. The function\n ``math.fmod()`` returns a result whose sign matches the sign of\n the first argument instead, and so returns ``-1e-100`` in this\n case. 
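For instance:

    >>> import math
    >>> -1e-100 % 1e100                 # sign follows the second operand
    1e+100
    >>> math.fmod(-1e-100, 1e100)       # sign follows the first argument
    -1e-100
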
Which approach is more appropriate depends on the\n application.\n\n[2] If x is very close to an exact integer multiple of y, it\'s\n possible for ``x//y`` to be one larger than ``(x-x%y)//y`` due to\n rounding. In such cases, Python returns the latter result, in\n order to preserve that ``divmod(x,y)[0] * y + x % y`` be very\n close to ``x``.\n\n[3] While comparisons between strings make sense at the byte level,\n they may be counter-intuitive to users. For example, the strings\n ``"\\u00C7"`` and ``"\\u0327\\u0043"`` compare differently, even\n though they both represent the same unicode character (LATIN\n CAPITAL LETTER C WITH CEDILLA). To compare strings in a human\n recognizable way, compare using ``unicodedata.normalize()``.\n\n[4] Due to automatic garbage-collection, free lists, and the dynamic\n nature of descriptors, you may notice seemingly unusual behaviour\n in certain uses of the ``is`` operator, like those involving\n comparisons between instance methods, or constants. Check their\n documentation for more info.\n\n[5] The ``%`` operator is also used for string formatting; the same\n precedence applies.\n\n[6] The power operator ``**`` binds less tightly than an arithmetic or\n bitwise unary operator on its right, that is, ``2**-1`` is\n ``0.5``.\n', 'pass': '\nThe ``pass`` statement\n**********************\n\n pass_stmt ::= "pass"\n\n``pass`` is a null operation --- when it is executed, nothing happens.\nIt is useful as a placeholder when a statement is required\nsyntactically, but no code needs to be executed, for example:\n\n def f(arg): pass # a function that does nothing (yet)\n\n class C: pass # a class with no methods (yet)\n', 'power': '\nThe power operator\n******************\n\nThe power operator binds more tightly than unary operators on its\nleft; it binds less tightly than unary operators on its right. The\nsyntax is:\n\n power ::= primary ["**" u_expr]\n\nThus, in an unparenthesized sequence of power and unary operators, the\noperators are evaluated from right to left (this does not constrain\nthe evaluation order for the operands): ``-1**2`` results in ``-1``.\n\nThe power operator has the same semantics as the built-in ``pow()``\nfunction, when called with two arguments: it yields its left argument\nraised to the power of its right argument. The numeric arguments are\nfirst converted to a common type, and the result is of that type.\n\nFor int operands, the result has the same type as the operands unless\nthe second argument is negative; in that case, all arguments are\nconverted to float and a float result is delivered. For example,\n``10**2`` returns ``100``, but ``10**-2`` returns ``0.01``.\n\nRaising ``0.0`` to a negative power results in a\n``ZeroDivisionError``. Raising a negative number to a fractional power\nresults in a ``complex`` number. (In earlier versions it raised a\n``ValueError``.)\n', 'raise': '\nThe ``raise`` statement\n***********************\n\n raise_stmt ::= "raise" [expression ["from" expression]]\n\nIf no expressions are present, ``raise`` re-raises the last exception\nthat was active in the current scope. If no exception is active in\nthe current scope, a ``RuntimeError`` exception is raised indicating\nthat this is an error.\n\nOtherwise, ``raise`` evaluates the first expression as the exception\nobject. It must be either a subclass or an instance of\n``BaseException``. 
If it is a class, the exception instance will be\nobtained when needed by instantiating the class with no arguments.\n\nThe *type* of the exception is the exception instance\'s class, the\n*value* is the instance itself.\n\nA traceback object is normally created automatically when an exception\nis raised and attached to it as the ``__traceback__`` attribute, which\nis writable. You can create an exception and set your own traceback in\none step using the ``with_traceback()`` exception method (which\nreturns the same exception instance, with its traceback set to its\nargument), like so:\n\n raise Exception("foo occurred").with_traceback(tracebackobj)\n\nThe ``from`` clause is used for exception chaining: if given, the\nsecond *expression* must be another exception class or instance, which\nwill then be attached to the raised exception as the ``__cause__``\nattribute (which is writable). If the raised exception is not\nhandled, both exceptions will be printed:\n\n >>> try:\n ... print(1 / 0)\n ... except Exception as exc:\n ... raise RuntimeError("Something bad happened") from exc\n ...\n Traceback (most recent call last):\n File "", line 2, in \n ZeroDivisionError: int division or modulo by zero\n\n The above exception was the direct cause of the following exception:\n\n Traceback (most recent call last):\n File "", line 4, in \n RuntimeError: Something bad happened\n\nA similar mechanism works implicitly if an exception is raised inside\nan exception handler: the previous exception is then attached as the\nnew exception\'s ``__context__`` attribute:\n\n >>> try:\n ... print(1 / 0)\n ... except:\n ... raise RuntimeError("Something bad happened")\n ...\n Traceback (most recent call last):\n File "", line 2, in \n ZeroDivisionError: int division or modulo by zero\n\n During handling of the above exception, another exception occurred:\n\n Traceback (most recent call last):\n File "", line 4, in \n RuntimeError: Something bad happened\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information about handling exceptions is in section\n*The try statement*.\n', 'return': '\nThe ``return`` statement\n************************\n\n return_stmt ::= "return" [expression_list]\n\n``return`` may only occur syntactically nested in a function\ndefinition, not within a nested class definition.\n\nIf an expression list is present, it is evaluated, else ``None`` is\nsubstituted.\n\n``return`` leaves the current function call with the expression list\n(or ``None``) as return value.\n\nWhen ``return`` passes control out of a ``try`` statement with a\n``finally`` clause, that ``finally`` clause is executed before really\nleaving the function.\n\nIn a generator function, the ``return`` statement indicates that the\ngenerator is done and will cause ``StopIteration`` to be raised. The\nreturned value (if any) is used as an argument to construct\n``StopIteration`` and becomes the ``StopIteration.value`` attribute.\n', - 'sequence-types': "\nEmulating container types\n*************************\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. 
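Looking back at the ``return`` entry above, a tiny sketch (the function name is invented) of how a generator's return value reaches the caller:

    def countdown():
        yield 2
        yield 1
        return 'liftoff'                # becomes StopIteration.value

    gen = countdown()
    print(next(gen), next(gen))         # 2 1
    try:
        next(gen)
    except StopIteration as exc:
        print(exc.value)                # liftoff
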
The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\nand ``update()`` behaving similar to those for Python's standard\ndictionary objects. The ``collections`` module provides a\n``MutableMapping`` abstract base class to help create those methods\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\nlike Python standard list objects. Finally, sequence types should\nimplement addition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\ndescribed below; they should not define other numerical operators. It\nis recommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should search the mapping's keys; for\nsequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``keys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn't define a ``__bool__()`` method and whose ``__len__()``\n method returns zero is considered to be false in a Boolean context.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with\n ``None``.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. 
The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``keys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don't define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n", + 'sequence-types': "\nEmulating container types\n*************************\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\nand ``update()`` behaving similar to those for Python's standard\ndictionary objects. The ``collections`` module provides a\n``MutableMapping`` abstract base class to help create those methods\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\n``__delitem__()``, and ``keys()``. 
Mutable sequences should provide\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\nlike Python standard list objects. Finally, sequence types should\nimplement addition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\ndescribed below; they should not define other numerical operators. It\nis recommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should search the mapping's keys; for\nsequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``keys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn't define a ``__bool__()`` method and whose ``__len__()``\n method returns zero is considered to be false in a Boolean context.\n\nobject.__length_hint__(self)\n\n Called to implement ``operator.length_hint()``. Should return an\n estimated length for the object (which may be greater or less than\n the actual length). The length must be an integer ``>=`` 0. This\n method is purely an optimization and is never required for\n correctness.\n\n New in version 3.4.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with\n ``None``.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. 
The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``keys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don't define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n", 'shifting': '\nShifting operations\n*******************\n\nThe shifting operations have lower priority than the arithmetic\noperations:\n\n shift_expr ::= a_expr | shift_expr ( "<<" | ">>" ) a_expr\n\nThese operators accept integers as arguments. They shift the first\nargument to the left or right by the number of bits given by the\nsecond argument.\n\nA right shift by *n* bits is defined as division by ``pow(2,n)``. A\nleft shift by *n* bits is defined as multiplication with ``pow(2,n)``.\n\nNote: In the current implementation, the right-hand operand is required to\n be at most ``sys.maxsize``. If the right-hand operand is larger\n than ``sys.maxsize`` an ``OverflowError`` exception is raised.\n', 'slicings': '\nSlicings\n********\n\nA slicing selects a range of items in a sequence object (e.g., a\nstring, tuple or list). Slicings may be used as expressions or as\ntargets in assignment or ``del`` statements. The syntax for a\nslicing:\n\n slicing ::= primary "[" slice_list "]"\n slice_list ::= slice_item ("," slice_item)* [","]\n slice_item ::= expression | proper_slice\n proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" [stride] ]\n lower_bound ::= expression\n upper_bound ::= expression\n stride ::= expression\n\nThere is ambiguity in the formal syntax here: anything that looks like\nan expression list also looks like a slice list, so any subscription\ncan be interpreted as a slicing. 
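As a quick interactive check of the shift identities given in the 'shifting' entry above (ordinary built-in integers, nothing new is defined):

    >>> 5 << 2                 # left shift by 2 bits: 5 * pow(2, 2)
    20
    >>> 20 >> 2                # right shift by 2 bits: 20 // pow(2, 2)
    5
    >>> -7 >> 1                # division by pow(2, 1) rounds toward -infinity
    -4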
Rather than further complicating the\nsyntax, this is disambiguated by defining that in this case the\ninterpretation as a subscription takes priority over the\ninterpretation as a slicing (this is the case if the slice list\ncontains no proper slice).\n\nThe semantics for a slicing are as follows. The primary must evaluate\nto a mapping object, and it is indexed (using the same\n``__getitem__()`` method as normal subscription) with a key that is\nconstructed from the slice list, as follows. If the slice list\ncontains at least one comma, the key is a tuple containing the\nconversion of the slice items; otherwise, the conversion of the lone\nslice item is the key. The conversion of a slice item that is an\nexpression is that expression. The conversion of a proper slice is a\nslice object (see section *The standard type hierarchy*) whose\n``start``, ``stop`` and ``step`` attributes are the values of the\nexpressions given as lower bound, upper bound and stride,\nrespectively, substituting ``None`` for missing expressions.\n', 'specialattrs': '\nSpecial Attributes\n******************\n\nThe implementation adds a few special read-only attributes to several\nobject types, where they are relevant. Some of these are not reported\nby the ``dir()`` built-in function.\n\nobject.__dict__\n\n A dictionary or other mapping object used to store an object\'s\n (writable) attributes.\n\ninstance.__class__\n\n The class to which a class instance belongs.\n\nclass.__bases__\n\n The tuple of base classes of a class object.\n\nclass.__name__\n\n The name of the class or type.\n\nclass.__qualname__\n\n The *qualified name* of the class or type.\n\n New in version 3.3.\n\nclass.__mro__\n\n This attribute is a tuple of classes that are considered when\n looking for base classes during method resolution.\n\nclass.mro()\n\n This method can be overridden by a metaclass to customize the\n method resolution order for its instances. It is called at class\n instantiation, and its result is stored in ``__mro__``.\n\nclass.__subclasses__()\n\n Each class keeps a list of weak references to its immediate\n subclasses. This method returns a list of all those references\n still alive. Example:\n\n >>> int.__subclasses__()\n []\n\n-[ Footnotes ]-\n\n[1] Additional information on these special methods may be found in\n the Python Reference Manual (*Basic customization*).\n\n[2] As a consequence, the list ``[1, 2]`` is considered equal to\n ``[1.0, 2.0]``, and similarly for tuples.\n\n[3] They must have since the parser can\'t tell the type of the\n operands.\n\n[4] Cased characters are those with general category property being\n one of "Lu" (Letter, uppercase), "Ll" (Letter, lowercase), or "Lt"\n (Letter, titlecase).\n\n[5] To format only a tuple you should therefore provide a singleton\n tuple whose only element is the tuple to be formatted.\n', - 'specialnames': '\nSpecial method names\n********************\n\nA class can implement certain operations that are invoked by special\nsyntax (such as arithmetic operations or subscripting and slicing) by\ndefining methods with special names. This is Python\'s approach to\n*operator overloading*, allowing classes to define their own behavior\nwith respect to language operators. For instance, if a class defines\na method named ``__getitem__()``, and ``x`` is an instance of this\nclass, then ``x[i]`` is roughly equivalent to ``type(x).__getitem__(x,\ni)``. 
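A short interactive sketch (the throwaway ``Show`` class is invented here) illustrates how a slice list is converted into the key passed to ``__getitem__()``, and how ``x[i]`` corresponds to ``type(x).__getitem__(x, i)``:

    >>> class Show:
    ...     def __getitem__(self, key):
    ...         return key
    ...
    >>> s = Show()
    >>> s[1:10:2]                  # a proper slice arrives as a slice object
    slice(1, 10, 2)
    >>> s[1:2, 7]                  # a slice list with a comma arrives as a tuple
    (slice(1, 2, None), 7)
    >>> type(s).__getitem__(s, 3)  # the call that s[3] makes implicitly
    3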
Except where mentioned, attempts to execute an operation raise\nan exception when no appropriate method is defined (typically\n``AttributeError`` or ``TypeError``).\n\nWhen implementing a class that emulates any built-in type, it is\nimportant that the emulation only be implemented to the degree that it\nmakes sense for the object being modelled. For example, some\nsequences may work well with retrieval of individual elements, but\nextracting a slice may not make sense. (One example of this is the\n``NodeList`` interface in the W3C\'s Document Object Model.)\n\n\nBasic customization\n===================\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. 
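Returning to ``__new__()``, here is a minimal sketch of its main use mentioned above, customizing creation of a subclass of an immutable type (the ``Point`` class is invented for illustration):

    class Point(tuple):
        """Hypothetical immutable 2-D point built on tuple."""

        def __new__(cls, x, y):
            # tuple is immutable, so the contents must be supplied to
            # __new__(); by the time __init__() runs it is too late.
            return super().__new__(cls, (x, y))

        def __repr__(self):
            return "Point(x=%r, y=%r)" % (self[0], self[1])

    p = Point(3, 4)
    assert isinstance(p, tuple) and p == (3, 4)
    assert repr(p) == "Point(x=3, y=4)"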
Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.last_traceback``. Circular references which are garbage are\n detected when the option cycle detector is enabled (it\'s on by\n default), but can only be cleaned up if there are no Python-\n level ``__del__()`` methods involved. Refer to the documentation\n for the ``gc`` module for more information about how\n ``__del__()`` methods are handled by the cycle detector,\n particularly the description of the ``garbage`` value.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function to compute the\n "official" string representation of an object. If at all possible,\n this should look like a valid Python expression that could be used\n to recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n ``<...some useful description...>`` should be returned. The return\n value must be a string object. If a class defines ``__repr__()``\n but not ``__str__()``, then ``__repr__()`` is also used when an\n "informal" string representation of instances of that class is\n required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by ``str(object)`` and the built-in functions ``format()``\n and ``print()`` to compute the "informal" or nicely printable\n string representation of an object. The return value must be a\n *string* object.\n\n This method differs from ``object.__repr__()`` in that there is no\n expectation that ``__str__()`` return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type ``object``\n calls ``object.__repr__()``.\n\nobject.__bytes__(self)\n\n Called by ``bytes()`` to compute a byte-string representation of an\n object. 
This should return a ``bytes`` object.\n\nobject.__format__(self, format_spec)\n\n Called by the ``format()`` built-in function (and by extension, the\n ``str.format()`` method of class ``str``) to produce a "formatted"\n string representation of an object. The ``format_spec`` argument is\n a string that contains a description of the formatting options\n desired. The interpretation of the ``format_spec`` argument is up\n to the type implementing ``__format__()``, however most classes\n will either delegate formatting to one of the built-in types, or\n use a similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: ``xy`` calls ``x.__gt__(y)``, and ``x>=y`` calls\n ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n If a class does not define an ``__eq__()`` method it should not\n define a ``__hash__()`` operation either; if it defines\n ``__eq__()`` but not ``__hash__()``, its instances will not be\n usable as items in hashable collections. 
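A minimal sketch tying the comparison and hashing rules together (the ``Version`` class is invented; ``functools.total_ordering()`` is the helper mentioned above):

    import functools

    @functools.total_ordering
    class Version:
        """Hypothetical value type; ordering derived from __eq__ and __lt__."""

        def __init__(self, major, minor):
            self.major, self.minor = major, minor

        def __eq__(self, other):
            if not isinstance(other, Version):
                return NotImplemented      # let the other operand try
            return (self.major, self.minor) == (other.major, other.minor)

        def __lt__(self, other):
            if not isinstance(other, Version):
                return NotImplemented
            return (self.major, self.minor) < (other.major, other.minor)

        def __hash__(self):
            # Mix the components that take part in comparison, as advised.
            return hash((self.major, self.minor))

    assert Version(3, 3) == Version(3, 3)
    assert Version(3, 3) < Version(3, 4) <= Version(3, 4)
    assert len({Version(3, 3), Version(3, 3)}) == 1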
If a class defines\n mutable objects and implements an ``__eq__()`` method, it should\n not implement ``__hash__()``, since the implementation of hashable\n collections requires that a key\'s hash value is immutable (if the\n object\'s hash value changes, it will be in the wrong hash bucket).\n\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns an appropriate value such\n that ``x == y`` implies both that ``x is y`` and ``hash(x) ==\n hash(y)``.\n\n A class that overrides ``__eq__()`` and does not define\n ``__hash__()`` will have its ``__hash__()`` implicitly set to\n ``None``. When the ``__hash__()`` method of a class is ``None``,\n instances of the class will raise an appropriate ``TypeError`` when\n a program attempts to retrieve their hash value, and will also be\n correctly identified as unhashable when checking ``isinstance(obj,\n collections.Hashable``).\n\n If a class that overrides ``__eq__()`` needs to retain the\n implementation of ``__hash__()`` from a parent class, the\n interpreter must be told this explicitly by setting ``__hash__ =\n .__hash__``.\n\n If a class that does not override ``__eq__()`` wishes to suppress\n hash support, it should include ``__hash__ = None`` in the class\n definition. A class which defines its own ``__hash__()`` that\n explicitly raises a ``TypeError`` would be incorrectly identified\n as hashable by an ``isinstance(obj, collections.Hashable)`` call.\n\n Note: By default, the ``__hash__()`` values of str, bytes and datetime\n objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also ``PYTHONHASHSEED``.\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``. When this method\n is not defined, ``__len__()`` is called, if it is defined, and the\n object is considered true if its result is nonzero. If a class\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\n are considered true.\n\n\nCustomizing attribute access\n============================\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) 
This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n ``__getattribute__()`` method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when ``dir()`` is called on the object. A sequence must be\n returned. ``dir()`` converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n------------------------\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n--------------------\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. 
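The delegation idea hinted at above (storing values in another object rather than in the instance dictionary) can be sketched with an invented ``Proxy`` class:

    class Proxy:
        """Hypothetical wrapper forwarding unknown attributes to a target."""

        def __init__(self, target):
            # Use object.__setattr__() so a __setattr__ hook added later
            # could not recurse back into this method.
            object.__setattr__(self, "_target", target)

        def __getattr__(self, name):
            # Only called when normal lookup fails, so Proxy's own
            # attributes (like _target) never reach this method.
            return getattr(self._target, name)

    p = Proxy([1, 2, 3])
    p.append(4)                  # resolved on the wrapped list via __getattr__
    assert p._target == [1, 2, 3, 4]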
If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to an object instance, ``a.x`` is transformed into the\n call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a class, ``A.x`` is transformed into the call:\n ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n---------\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. 
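A small sketch of a data descriptor (the ``Typed`` and ``Order`` classes are invented for illustration); because it defines ``__set__()``, it takes precedence over the instance dictionary as described above:

    class Typed:
        """Hypothetical data descriptor enforcing a type on one attribute."""

        def __init__(self, name, kind):
            self.name, self.kind = name, kind

        def __get__(self, instance, owner):
            if instance is None:           # accessed on the class itself
                return self
            return instance.__dict__[self.name]

        def __set__(self, instance, value):
            if not isinstance(value, self.kind):
                raise TypeError("%s must be %s" % (self.name,
                                                   self.kind.__name__))
            instance.__dict__[self.name] = value

    class Order:
        quantity = Typed("quantity", int)  # descriptor lives in the class dict

    o = Order()
    o.quantity = 3                         # routed through Typed.__set__
    assert o.quantity == 3                 # routed through Typed.__get__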
If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises ``AttributeError``. If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``int``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n\nCustomizing class creation\n==========================\n\nBy default, classes are constructed using ``type()``. The class body\nis executed in a new namespace and the class name is bound locally to\nthe result of ``type(name, bases, namespace)``.\n\nThe class creation process can be customised by passing the\n``metaclass`` keyword argument in the class definition line, or by\ninheriting from an existing class that included such an argument. 
In\nthe following example, both ``MyClass`` and ``MySubclass`` are\ninstances of ``Meta``:\n\n class Meta(type):\n pass\n\n class MyClass(metaclass=Meta):\n pass\n\n class MySubclass(MyClass):\n pass\n\nAny other keyword arguments that are specified in the class definition\nare passed through to all metaclass operations described below.\n\nWhen a class definition is executed, the following steps occur:\n\n* the appropriate metaclass is determined\n\n* the class namespace is prepared\n\n* the class body is executed\n\n* the class object is created\n\n\nDetermining the appropriate metaclass\n-------------------------------------\n\nThe appropriate metaclass for a class definition is determined as\nfollows:\n\n* if no bases and no explicit metaclass are given, then ``type()`` is\n used\n\n* if an explicit metaclass is given and it is *not* an instance of\n ``type()``, then it is used directly as the metaclass\n\n* if an instance of ``type()`` is given as the explicit metaclass, or\n bases are defined, then the most derived metaclass is used\n\nThe most derived metaclass is selected from the explicitly specified\nmetaclass (if any) and the metaclasses (i.e. ``type(cls)``) of all\nspecified base classes. The most derived metaclass is one which is a\nsubtype of *all* of these candidate metaclasses. If none of the\ncandidate metaclasses meets that criterion, then the class definition\nwill fail with ``TypeError``.\n\n\nPreparing the class namespace\n-----------------------------\n\nOnce the appropriate metaclass has been identified, then the class\nnamespace is prepared. If the metaclass has a ``__prepare__``\nattribute, it is called as ``namespace = metaclass.__prepare__(name,\nbases, **kwds)`` (where the additional keyword arguments, if any, come\nfrom the class definition).\n\nIf the metaclass has no ``__prepare__`` attribute, then the class\nnamespace is initialised as an empty ``dict()`` instance.\n\nSee also:\n\n **PEP 3115** - Metaclasses in Python 3000\n Introduced the ``__prepare__`` namespace hook\n\n\nExecuting the class body\n------------------------\n\nThe class body is executed (approximately) as ``exec(body, globals(),\nnamespace)``. The key difference from a normal call to ``exec()`` is\nthat lexical scoping allows the class body (including any methods) to\nreference names from the current and outer scopes when the class\ndefinition occurs inside a function.\n\nHowever, even when the class definition occurs inside the function,\nmethods defined inside the class still cannot see names defined at the\nclass scope. Class variables must be accessed through the first\nparameter of instance or class methods, and cannot be accessed at all\nfrom static methods.\n\n\nCreating the class object\n-------------------------\n\nOnce the class namespace has been populated by executing the class\nbody, the class object is created by calling ``metaclass(name, bases,\nnamespace, **kwds)`` (the additional keywords passed here are the same\nas those passed to ``__prepare__``).\n\nThis class object is the one that will be referenced by the zero-\nargument form of ``super()``. ``__class__`` is an implicit closure\nreference created by the compiler if any methods in a class body refer\nto either ``__class__`` or ``super``. 
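As a sketch of how extra keywords in a class definition reach the metaclass operations mentioned above (the ``Meta`` and ``Widget`` names are invented for illustration):

    class Meta(type):
        def __new__(mcls, name, bases, namespace, **kwds):
            cls = super().__new__(mcls, name, bases, namespace)
            cls.options = kwds             # extra class keywords end up here
            return cls

        def __init__(cls, name, bases, namespace, **kwds):
            super().__init__(name, bases, namespace)

    class Widget(metaclass=Meta, colour="blue"):
        pass

    assert Widget.options == {"colour": "blue"}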
This allows the zero argument\nform of ``super()`` to correctly identify the class being defined\nbased on lexical scoping, while the class or instance that was used to\nmake the current call is identified based on the first argument passed\nto the method.\n\nAfter the class object is created, it is passed to the class\ndecorators included in the class definition (if any) and the resulting\nobject is bound in the local namespace as the defined class.\n\nSee also:\n\n **PEP 3135** - New super\n Describes the implicit ``__class__`` closure reference\n\n\nMetaclass example\n-----------------\n\nThe potential uses for metaclasses are boundless. Some ideas that have\nbeen explored include logging, interface checking, automatic\ndelegation, automatic property creation, proxies, frameworks, and\nautomatic resource locking/synchronization.\n\nHere is an example of a metaclass that uses an\n``collections.OrderedDict`` to remember the order that class members\nwere defined:\n\n class OrderedClass(type):\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwds):\n return collections.OrderedDict()\n\n def __new__(cls, name, bases, namespace, **kwds):\n result = type.__new__(cls, name, bases, dict(namespace))\n result.members = tuple(namespace)\n return result\n\n class A(metaclass=OrderedClass):\n def one(self): pass\n def two(self): pass\n def three(self): pass\n def four(self): pass\n\n >>> A.members\n (\'__module__\', \'one\', \'two\', \'three\', \'four\')\n\nWhen the class definition for *A* gets executed, the process begins\nwith calling the metaclass\'s ``__prepare__()`` method which returns an\nempty ``collections.OrderedDict``. That mapping records the methods\nand attributes of *A* as they are defined within the body of the class\nstatement. Once those definitions are executed, the ordered dictionary\nis fully populated and the metaclass\'s ``__new__()`` method gets\ninvoked. That method builds the new type and it saves the ordered\ndictionary keys in an attribute called ``members``.\n\n\nCustomizing instance and subclass checks\n========================================\n\nThe following methods are used to override the default behavior of the\n``isinstance()`` and ``issubclass()`` built-in functions.\n\nIn particular, the metaclass ``abc.ABCMeta`` implements these methods\nin order to allow the addition of Abstract Base Classes (ABCs) as\n"virtual base classes" to any class or type (including built-in\ntypes), including other ABCs.\n\nclass.__instancecheck__(self, instance)\n\n Return true if *instance* should be considered a (direct or\n indirect) instance of *class*. If defined, called to implement\n ``isinstance(instance, class)``.\n\nclass.__subclasscheck__(self, subclass)\n\n Return true if *subclass* should be considered a (direct or\n indirect) subclass of *class*. If defined, called to implement\n ``issubclass(subclass, class)``.\n\nNote that these methods are looked up on the type (metaclass) of a\nclass. 
They cannot be defined as class methods in the actual class.\nThis is consistent with the lookup of special methods that are called\non instances, only in this case the instance is itself a class.\n\nSee also:\n\n **PEP 3119** - Introducing Abstract Base Classes\n Includes the specification for customizing ``isinstance()`` and\n ``issubclass()`` behavior through ``__instancecheck__()`` and\n ``__subclasscheck__()``, with motivation for this functionality\n in the context of adding Abstract Base Classes (see the ``abc``\n module) to the language.\n\n\nEmulating callable objects\n==========================\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\n ``x.__call__(arg1, arg2, ...)``.\n\n\nEmulating container types\n=========================\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\nand ``update()`` behaving similar to those for Python\'s standard\ndictionary objects. The ``collections`` module provides a\n``MutableMapping`` abstract base class to help create those methods\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\nlike Python standard list objects. Finally, sequence types should\nimplement addition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\ndescribed below; they should not define other numerical operators. It\nis recommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should search the mapping\'s keys; for\nsequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``keys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn\'t define a ``__bool__()`` method and whose ``__len__()``\n method returns zero is considered to be false in a Boolean context.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with\n ``None``.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. 
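A minimal sketch of a callable instance (the ``Adder`` class is invented):

    class Adder:
        """Hypothetical callable object remembering a fixed increment."""

        def __init__(self, n):
            self.n = n

        def __call__(self, value):
            # adder(value) is shorthand for adder.__call__(value)
            return value + self.n

    add_three = Adder(3)
    assert add_three(4) == 7
    assert add_three.__call__(4) == 7      # the explicit spelling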
For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``keys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. 
For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n\n\nEmulating numeric types\n=======================\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``). For instance, to evaluate the expression ``x + y``, where\n *x* is an instance of a class that has an ``__add__()`` method,\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\n should not be related to ``__truediv__()``. Note that\n ``__pow__()`` should be defined to accept an optional third\n argument if the ternary version of the built-in ``pow()`` function\n is to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return ``NotImplemented``.\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``) with reflected (swapped) operands. These functions are only\n called if the left operand does not support the corresponding\n operation and the operands are of different types. [2] For\n instance, to evaluate the expression ``x - y``, where *y* is an\n instance of a class that has an ``__rsub__()`` method,\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\n *NotImplemented*.\n\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\n (the coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left operand\'s\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand\'s\n non-reflected method. 
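The following sketch (an invented ``Meters`` class) shows ``__add__()`` and ``__mul__()`` returning ``NotImplemented`` for unsupported operands and reusing the same functions as their reflected versions:

    class Meters:
        """Hypothetical length type supporting + and * with plain numbers."""

        def __init__(self, value):
            self.value = value

        def __add__(self, other):
            if isinstance(other, Meters):
                return Meters(self.value + other.value)
            if isinstance(other, (int, float)):
                return Meters(self.value + other)
            return NotImplemented          # let the other operand try

        __radd__ = __add__                 # 3 + Meters(2) also works

        def __mul__(self, other):
            if isinstance(other, (int, float)):
                return Meters(self.value * other)
            return NotImplemented

        __rmul__ = __mul__

        def __repr__(self):
            return "Meters(%r)" % self.value

    assert (Meters(2) + Meters(3)).value == 5
    assert (4 + Meters(1)).value == 5      # falls back to Meters.__radd__
    assert (Meters(2) * 3).value == 6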
This behavior allows subclasses to\n override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\n should attempt to do the operation in-place (modifying *self*) and\n return the result (which could be, but does not have to be,\n *self*). If a specific method is not defined, the augmented\n assignment falls back to the normal methods. For instance, to\n execute the statement ``x += y``, where *x* is an instance of a\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\n called. If *x* is an instance of a class that does not define a\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\n considered, as with the evaluation of ``x + y``.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations (``-``, ``+``,\n ``abs()`` and ``~``).\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions ``complex()``,\n ``int()``, ``float()`` and ``round()``. Should return a value of\n the appropriate type.\n\nobject.__index__(self)\n\n Called to implement ``operator.index()``. Also called whenever\n Python needs an integer object (such as in slicing, or in the\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). Must return\n an integer.\n\n\nWith Statement Context Managers\n===============================\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a ``with`` statement. The context\nmanager handles the entry into, and the exit from, the desired runtime\ncontext for the execution of the block of code. Context managers are\nnormally invoked using the ``with`` statement (described in section\n*The with statement*), but can also be used by directly invoking their\nmethods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The ``with``\n statement will bind this method\'s return value to the target(s)\n specified in the ``as`` clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be ``None``.\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. 
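A minimal context manager sketch (the ``timed_block`` class is invented; its ``__exit__()`` returns a false value, so exceptions are not suppressed; the rest of the ``__exit__()`` entry continues below):

    import time

    class timed_block:
        """Hypothetical context manager measuring how long the block took."""

        def __enter__(self):
            self.start = time.monotonic()
            return self                    # bound to the target after 'as'

        def __exit__(self, exc_type, exc_value, traceback):
            self.elapsed = time.monotonic() - self.start
            # Returning a false value lets any exception propagate normally;
            # returning True here would suppress it instead.
            return False

    with timed_block() as t:
        sum(range(10000))
    print("block took %.6f seconds" % t.elapsed)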
Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that ``__exit__()`` methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n\n\nSpecial method lookup\n=====================\n\nFor custom classes, implicit invocations of special methods are only\nguaranteed to work correctly if defined on an object\'s type, not in\nthe object\'s instance dictionary. That behaviour is the reason why\nthe following code raises an exception:\n\n >>> class C:\n ... pass\n ...\n >>> c = C()\n >>> c.__len__ = lambda: 5\n >>> len(c)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: object of type \'C\' has no len()\n\nThe rationale behind this behaviour lies with a number of special\nmethods such as ``__hash__()`` and ``__repr__()`` that are implemented\nby all objects, including type objects. If the implicit lookup of\nthese methods used the conventional lookup process, they would fail\nwhen invoked on the type object itself:\n\n >>> 1 .__hash__() == hash(1)\n True\n >>> int.__hash__() == hash(int)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: descriptor \'__hash__\' of \'int\' object needs an argument\n\nIncorrectly attempting to invoke an unbound method of a class in this\nway is sometimes referred to as \'metaclass confusion\', and is avoided\nby bypassing the instance when looking up special methods:\n\n >>> type(1).__hash__(1) == hash(1)\n True\n >>> type(int).__hash__(int) == hash(int)\n True\n\nIn addition to bypassing any instance attributes in the interest of\ncorrectness, implicit special method lookup generally also bypasses\nthe ``__getattribute__()`` method even of the object\'s metaclass:\n\n >>> class Meta(type):\n ... def __getattribute__(*args):\n ... print("Metaclass getattribute invoked")\n ... return type.__getattribute__(*args)\n ...\n >>> class C(object, metaclass=Meta):\n ... def __len__(self):\n ... return 10\n ... def __getattribute__(*args):\n ... print("Class getattribute invoked")\n ... return object.__getattribute__(*args)\n ...\n >>> c = C()\n >>> c.__len__() # Explicit lookup via instance\n Class getattribute invoked\n 10\n >>> type(c).__len__(c) # Explicit lookup via type\n Metaclass getattribute invoked\n 10\n >>> len(c) # Implicit lookup\n 10\n\nBypassing the ``__getattribute__()`` machinery in this fashion\nprovides significant scope for speed optimisations within the\ninterpreter, at the cost of some flexibility in the handling of\nspecial methods (the special method *must* be set on the class object\nitself in order to be consistently invoked by the interpreter).\n\n-[ Footnotes ]-\n\n[1] It *is* possible in some cases to change an object\'s type, under\n certain controlled conditions. 
It generally isn\'t a good idea\n though, since it can lead to some very strange behaviour if it is\n handled incorrectly.\n\n[2] For operands of the same type, it is assumed that if the non-\n reflected method (such as ``__add__()``) fails the operation is\n not supported, which is why the reflected method is not called.\n', - 'string-methods': '\nString Methods\n**************\n\nStrings implement all of the *common* sequence operations, along with\nthe additional methods described below.\n\nStrings also support two styles of string formatting, one providing a\nlarge degree of flexibility and customization (see ``str.format()``,\n*Format String Syntax* and *String Formatting*) and the other based on\nC ``printf`` style formatting that handles a narrower range of types\nand is slightly harder to use correctly, but is often faster for the\ncases it can handle (*printf-style String Formatting*).\n\nThe *Text Processing Services* section of the standard library covers\na number of other modules that provide various text related utilities\n(including regular expression support in the ``re`` module).\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\nstr.casefold()\n\n Return a casefolded copy of the string. Casefolded strings may be\n used for caseless matching.\n\n Casefolding is similar to lowercasing but more aggressive because\n it is intended to remove all case distinctions in a string. For\n example, the German lowercase letter ``\'\xc3\x9f\'`` is equivalent to\n ``"ss"``. Since it is already lowercase, ``lower()`` would do\n nothing to ``\'\xc3\x9f\'``; ``casefold()`` converts it to ``"ss"``.\n\n The casefolding algorithm is described in section 3.13 of the\n Unicode Standard.\n\n New in version 3.3.\n\nstr.center(width[, fillchar])\n\n Return centered in a string of length *width*. Padding is done\n using the specified *fillchar* (default is a space).\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.encode(encoding="utf-8", errors="strict")\n\n Return an encoded version of the string as a bytes object. Default\n encoding is ``\'utf-8\'``. *errors* may be given to set a different\n error handling scheme. The default for *errors* is ``\'strict\'``,\n meaning that encoding errors raise a ``UnicodeError``. Other\n possible values are ``\'ignore\'``, ``\'replace\'``,\n ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and any other name\n registered via ``codecs.register_error()``, see section *Codec Base\n Classes*. For a list of possible encodings, see section *Standard\n Encodings*.\n\n Changed in version 3.1: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by zero or more spaces, depending on the current column and the\n given tab size. The column number is reset to zero after each\n newline occurring in the string. If *tabsize* is not given, a tab\n size of ``8`` characters is assumed. 
This doesn\'t understand other\n non-printing characters or escape sequences.\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\nstr.format_map(mapping)\n\n Similar to ``str.format(**mapping)``, except that ``mapping`` is\n used directly and not copied to a ``dict`` . This is useful if for\n example ``mapping`` is a dict subclass:\n\n >>> class Default(dict):\n ... def __missing__(self, key):\n ... return key\n ...\n >>> \'{name} was born in {country}\'.format_map(Default(name=\'Guido\'))\n \'Guido was born in country\'\n\n New in version 3.2.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise. A character\n ``c`` is alphanumeric if one of the following returns ``True``:\n ``c.isalpha()``, ``c.isdecimal()``, ``c.isdigit()``, or\n ``c.isnumeric()``.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise. Alphabetic\n characters are those characters defined in the Unicode character\n database as "Letter", i.e., those with general category property\n being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is\n different from the "Alphabetic" property defined in the Unicode\n Standard.\n\nstr.isdecimal()\n\n Return true if all characters in the string are decimal characters\n and there is at least one character, false otherwise. Decimal\n characters are those from general category "Nd". This category\n includes digit characters, and all characters that can be used to\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise. Digits include decimal\n characters and digits that need special handling, such as the\n compatibility superscript digits. 
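To make the distinction between the three numeric predicates above (``isdecimal()``, ``isdigit()``, ``isnumeric()``) concrete, here is a small interactive sketch; the sample characters are chosen for illustration only and are not part of the patch:

    >>> '123'.isdecimal(), '123'.isdigit(), '123'.isnumeric()
    (True, True, True)
    >>> '\u00b2'.isdecimal(), '\u00b2'.isdigit(), '\u00b2'.isnumeric()   # superscript two
    (False, True, True)
    >>> '\u00bd'.isdecimal(), '\u00bd'.isdigit(), '\u00bd'.isnumeric()   # vulgar fraction one half
    (False, False, True)

Every decimal character is a digit, and every digit is numeric, but not the other way around.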
Formally, a digit is a character\n that has the property value Numeric_Type=Digit or\n Numeric_Type=Decimal.\n\nstr.isidentifier()\n\n Return true if the string is a valid identifier according to the\n language definition, section *Identifiers and keywords*.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\nstr.isnumeric()\n\n Return true if all characters in the string are numeric characters,\n and there is at least one character, false otherwise. Numeric\n characters include digit characters, and all characters that have\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\n ONE FIFTH. Formally, numeric characters are those with the\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\n Numeric_Type=Numeric.\n\nstr.isprintable()\n\n Return true if all characters in the string are printable or the\n string is empty, false otherwise. Nonprintable characters are\n those characters defined in the Unicode character database as\n "Other" or "Separator", excepting the ASCII space (0x20) which is\n considered printable. (Note that printable characters in this\n context are those which should not be escaped when ``repr()`` is\n invoked on a string. It has no bearing on the handling of strings\n written to ``sys.stdout`` or ``sys.stderr``.)\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise. Whitespace\n characters are those characters defined in the Unicode character\n database as "Other" or "Separator" and those with bidirectional\n property being one of "WS", "B", or "S".\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. A ``TypeError`` will be raised if there are\n any non-string values in *iterable*, including ``bytes`` objects.\n The separator between elements is the string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n The lowercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. 
The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\nstatic str.maketrans(x[, y[, z]])\n\n This static method returns a translation table usable for\n ``str.translate()``.\n\n If there is only one argument, it must be a dictionary mapping\n Unicode ordinals (integers) or characters (strings of length 1) to\n Unicode ordinals, strings (of arbitrary lengths) or None.\n Character keys will then be converted to ordinals.\n\n If there are two arguments, they must be strings of equal length,\n and in the resulting dictionary, each character in x will be mapped\n to the character at the same position in y. If there is a third\n argument, it must be a string, whose characters will be mapped to\n None in the result.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\nstr.rsplit(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\nstr.split(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). 
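A brief sketch of how ``str.maketrans()`` feeds ``str.translate()``, and of the ``partition()`` and ``rsplit()`` behaviour described above; the sample strings are arbitrary:

    >>> table = str.maketrans('ae', '43', 'l')    # map 'a'->'4', 'e'->'3', delete 'l'
    >>> 'release'.translate(table)
    'r334s3'
    >>> 'key=value=more'.partition('=')
    ('key', '=', 'value=more')
    >>> 'a,b,c'.rsplit(',', 1)
    ['a,b', 'c']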
If *maxsplit* is not specified or ``-1``, then there is\n no limit on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. This method uses the *universal newlines* approach to\n splitting lines. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\n For example, ``\'ab c\\n\\nde fg\\rkl\\r\\n\'.splitlines()`` returns\n ``[\'ab c\', \'\', \'de fg\', \'kl\']``, while the same call with\n ``splitlines(True)`` returns ``[\'ab c\\n\', \'\\n\', \'de fg\\r\',\n \'kl\\r\\n\']``.\n\n Unlike ``split()`` when a delimiter string *sep* is given, this\n method returns an empty list for the empty string, and a terminal\n line break does not result in an extra line.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa. Note that it is not necessarily true that\n ``s.swapcase().swapcase() == s``.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... 
s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\nstr.translate(map)\n\n Return a copy of the *s* where all characters have been mapped\n through the *map* which must be a dictionary of Unicode ordinals\n (integers) to Unicode ordinals, strings or ``None``. Unmapped\n characters are left untouched. Characters mapped to ``None`` are\n deleted.\n\n You can use ``str.maketrans()`` to create a translation map from\n character-to-character mappings in different formats.\n\n Note: An even more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see\n ``encodings.cp1251`` for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n The uppercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n', + 'specialnames': '\nSpecial method names\n********************\n\nA class can implement certain operations that are invoked by special\nsyntax (such as arithmetic operations or subscripting and slicing) by\ndefining methods with special names. This is Python\'s approach to\n*operator overloading*, allowing classes to define their own behavior\nwith respect to language operators. For instance, if a class defines\na method named ``__getitem__()``, and ``x`` is an instance of this\nclass, then ``x[i]`` is roughly equivalent to ``type(x).__getitem__(x,\ni)``. Except where mentioned, attempts to execute an operation raise\nan exception when no appropriate method is defined (typically\n``AttributeError`` or ``TypeError``).\n\nWhen implementing a class that emulates any built-in type, it is\nimportant that the emulation only be implemented to the degree that it\nmakes sense for the object being modelled. For example, some\nsequences may work well with retrieval of individual elements, but\nextracting a slice may not make sense. (One example of this is the\n``NodeList`` interface in the W3C\'s Document Object Model.)\n\n\nBasic customization\n===================\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). 
The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.last_traceback``. Circular references which are garbage are\n detected and cleaned up when the cyclic garbage collector is\n enabled (it\'s on by default). 
Refer to the documentation for the\n ``gc`` module for more information about this topic.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function to compute the\n "official" string representation of an object. If at all possible,\n this should look like a valid Python expression that could be used\n to recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n ``<...some useful description...>`` should be returned. The return\n value must be a string object. If a class defines ``__repr__()``\n but not ``__str__()``, then ``__repr__()`` is also used when an\n "informal" string representation of instances of that class is\n required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by ``str(object)`` and the built-in functions ``format()``\n and ``print()`` to compute the "informal" or nicely printable\n string representation of an object. The return value must be a\n *string* object.\n\n This method differs from ``object.__repr__()`` in that there is no\n expectation that ``__str__()`` return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type ``object``\n calls ``object.__repr__()``.\n\nobject.__bytes__(self)\n\n Called by ``bytes()`` to compute a byte-string representation of an\n object. This should return a ``bytes`` object.\n\nobject.__format__(self, format_spec)\n\n Called by the ``format()`` built-in function (and by extension, the\n ``str.format()`` method of class ``str``) to produce a "formatted"\n string representation of an object. The ``format_spec`` argument is\n a string that contains a description of the formatting options\n desired. The interpretation of the ``format_spec`` argument is up\n to the type implementing ``__format__()``, however most classes\n will either delegate formatting to one of the built-in types, or\n use a similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. 
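A compact sketch of the three string-conversion hooks just described; ``Point`` is an invented example, not part of the patch:

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y
        def __repr__(self):
            # unambiguous, ideally eval()-able
            return 'Point({!r}, {!r})'.format(self.x, self.y)
        def __str__(self):
            return '({}, {})'.format(self.x, self.y)
        def __format__(self, format_spec):
            # delegate the format spec to the components
            return '({}, {})'.format(format(self.x, format_spec),
                                     format(self.y, format_spec))

    >>> p = Point(1.5, 2.25)
    >>> p                      # the interactive prompt uses repr()
    Point(1.5, 2.25)
    >>> print(p)               # print() uses str()
    (1.5, 2.25)
    >>> '{:.1f}'.format(p)     # format() reaches __format__()
    '(1.5, 2.2)'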
The\n correspondence between operator symbols and method names is as\n follows: ``x<y`` calls ``x.__lt__(y)``, ``x<=y`` calls\n ``x.__le__(y)``, ``x==y`` calls ``x.__eq__(y)``, ``x!=y`` calls\n ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and ``x>=y`` calls\n ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n Note: ``hash()`` truncates the value returned from an object\'s custom\n ``__hash__()`` method to the size of a ``Py_ssize_t``. This is\n typically 8 bytes on 64-bit builds and 4 bytes on 32-bit builds.\n If an object\'s ``__hash__()`` must interoperate on builds of\n different bit sizes, be sure to check the width on all supported\n builds. An easy way to do this is with ``python -c "import sys;\n print(sys.hash_info.width)"``\n\n If a class does not define an ``__eq__()`` method it should not\n define a ``__hash__()`` operation either; if it defines\n ``__eq__()`` but not ``__hash__()``, its instances will not be\n usable as items in hashable collections. If a class defines\n mutable objects and implements an ``__eq__()`` method, it should\n not implement ``__hash__()``, since the implementation of hashable\n collections requires that a key\'s hash value is immutable (if the\n object\'s hash value changes, it will be in the wrong hash bucket).\n\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns an appropriate value such\n that ``x == y`` implies both that ``x is y`` and ``hash(x) ==\n hash(y)``.\n\n A class that overrides ``__eq__()`` and does not define\n ``__hash__()`` will have its ``__hash__()`` implicitly set to\n ``None``. 
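The following sketch ties the comparison and hashing rules together; the ``Version`` class is hypothetical, and ``functools.total_ordering()`` fills in the missing ordering methods as mentioned above:

    import functools

    @functools.total_ordering
    class Version:
        def __init__(self, major, minor):
            self.major, self.minor = major, minor
        def __eq__(self, other):
            if not isinstance(other, Version):
                return NotImplemented
            return (self.major, self.minor) == (other.major, other.minor)
        def __lt__(self, other):
            if not isinstance(other, Version):
                return NotImplemented
            return (self.major, self.minor) < (other.major, other.minor)
        def __hash__(self):
            # hash exactly the components that __eq__() compares
            return hash((self.major, self.minor))

    >>> Version(3, 3) <= Version(3, 4)        # __le__() supplied by total_ordering
    True
    >>> len({Version(3, 3), Version(3, 3)})   # equal objects hash equal
    1

Defining ``__eq__()`` without the explicit ``__hash__()`` above would have set ``__hash__`` to ``None`` and made the set example raise ``TypeError``.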
When the ``__hash__()`` method of a class is ``None``,\n instances of the class will raise an appropriate ``TypeError`` when\n a program attempts to retrieve their hash value, and will also be\n correctly identified as unhashable when checking ``isinstance(obj,\n collections.Hashable``).\n\n If a class that overrides ``__eq__()`` needs to retain the\n implementation of ``__hash__()`` from a parent class, the\n interpreter must be told this explicitly by setting ``__hash__ =\n .__hash__``.\n\n If a class that does not override ``__eq__()`` wishes to suppress\n hash support, it should include ``__hash__ = None`` in the class\n definition. A class which defines its own ``__hash__()`` that\n explicitly raises a ``TypeError`` would be incorrectly identified\n as hashable by an ``isinstance(obj, collections.Hashable)`` call.\n\n Note: By default, the ``__hash__()`` values of str, bytes and datetime\n objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also ``PYTHONHASHSEED``.\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``. When this method\n is not defined, ``__len__()`` is called, if it is defined, and the\n object is considered true if its result is nonzero. If a class\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\n are considered true.\n\n\nCustomizing attribute access\n============================\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n ``__getattribute__()`` method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. 
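To illustrate the asymmetry noted above for ``__getattr__()``, here is a tiny delegating proxy invented for this note:

    class LoggingProxy:
        def __init__(self, target):
            self._target = target            # ordinary attribute, found normally
        def __getattr__(self, name):
            # only reached when the normal lookup fails
            print('fetching', name)
            return getattr(self._target, name)

    >>> p = LoggingProxy([1, 2, 3])
    >>> p.append(4)                          # not found normally -> __getattr__()
    fetching append
    >>> p._target                            # found normally, no message
    [1, 2, 3, 4]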
If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when ``dir()`` is called on the object. A sequence must be\n returned. ``dir()`` converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n------------------------\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n--------------------\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. 
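A minimal data descriptor along the lines described above; ``Positive`` and ``Rectangle`` are illustrative names only:

    class Positive:
        def __init__(self, name):
            self.name = name                     # key used in the instance __dict__
        def __get__(self, instance, owner):
            if instance is None:
                return self                      # accessed on the class itself
            return instance.__dict__[self.name]
        def __set__(self, instance, value):
            if value <= 0:
                raise ValueError(self.name + ' must be positive')
            instance.__dict__[self.name] = value

    class Rectangle:
        width = Positive('width')
        height = Positive('height')
        def __init__(self, width, height):
            self.width = width                   # routed through Positive.__set__()
            self.height = height

Because ``Positive`` defines both ``__get__()`` and ``__set__()`` it is a data descriptor and takes precedence over the value stored in the instance dictionary.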
Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to an object instance, ``a.x`` is transformed into the\n call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a class, ``A.x`` is transformed into the call:\n ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n---------\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises ``AttributeError``. 
If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``int``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n\nCustomizing class creation\n==========================\n\nBy default, classes are constructed using ``type()``. The class body\nis executed in a new namespace and the class name is bound locally to\nthe result of ``type(name, bases, namespace)``.\n\nThe class creation process can be customised by passing the\n``metaclass`` keyword argument in the class definition line, or by\ninheriting from an existing class that included such an argument. In\nthe following example, both ``MyClass`` and ``MySubclass`` are\ninstances of ``Meta``:\n\n class Meta(type):\n pass\n\n class MyClass(metaclass=Meta):\n pass\n\n class MySubclass(MyClass):\n pass\n\nAny other keyword arguments that are specified in the class definition\nare passed through to all metaclass operations described below.\n\nWhen a class definition is executed, the following steps occur:\n\n* the appropriate metaclass is determined\n\n* the class namespace is prepared\n\n* the class body is executed\n\n* the class object is created\n\n\nDetermining the appropriate metaclass\n-------------------------------------\n\nThe appropriate metaclass for a class definition is determined as\nfollows:\n\n* if no bases and no explicit metaclass are given, then ``type()`` is\n used\n\n* if an explicit metaclass is given and it is *not* an instance of\n ``type()``, then it is used directly as the metaclass\n\n* if an instance of ``type()`` is given as the explicit metaclass, or\n bases are defined, then the most derived metaclass is used\n\nThe most derived metaclass is selected from the explicitly specified\nmetaclass (if any) and the metaclasses (i.e. ``type(cls)``) of all\nspecified base classes. The most derived metaclass is one which is a\nsubtype of *all* of these candidate metaclasses. 
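Condensing the *__slots__* notes above into a sketch (the class name is invented):

    class Point2D:
        __slots__ = ('x', 'y')       # no per-instance __dict__ is created
        def __init__(self, x, y):
            self.x = x
            self.y = y

    >>> p = Point2D(1, 2)
    >>> p.z = 3                      # not listed in __slots__
    Traceback (most recent call last):
      ...
    AttributeError: 'Point2D' object has no attribute 'z'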
If none of the\ncandidate metaclasses meets that criterion, then the class definition\nwill fail with ``TypeError``.\n\n\nPreparing the class namespace\n-----------------------------\n\nOnce the appropriate metaclass has been identified, then the class\nnamespace is prepared. If the metaclass has a ``__prepare__``\nattribute, it is called as ``namespace = metaclass.__prepare__(name,\nbases, **kwds)`` (where the additional keyword arguments, if any, come\nfrom the class definition).\n\nIf the metaclass has no ``__prepare__`` attribute, then the class\nnamespace is initialised as an empty ``dict()`` instance.\n\nSee also:\n\n **PEP 3115** - Metaclasses in Python 3000\n Introduced the ``__prepare__`` namespace hook\n\n\nExecuting the class body\n------------------------\n\nThe class body is executed (approximately) as ``exec(body, globals(),\nnamespace)``. The key difference from a normal call to ``exec()`` is\nthat lexical scoping allows the class body (including any methods) to\nreference names from the current and outer scopes when the class\ndefinition occurs inside a function.\n\nHowever, even when the class definition occurs inside the function,\nmethods defined inside the class still cannot see names defined at the\nclass scope. Class variables must be accessed through the first\nparameter of instance or class methods, and cannot be accessed at all\nfrom static methods.\n\n\nCreating the class object\n-------------------------\n\nOnce the class namespace has been populated by executing the class\nbody, the class object is created by calling ``metaclass(name, bases,\nnamespace, **kwds)`` (the additional keywords passed here are the same\nas those passed to ``__prepare__``).\n\nThis class object is the one that will be referenced by the zero-\nargument form of ``super()``. ``__class__`` is an implicit closure\nreference created by the compiler if any methods in a class body refer\nto either ``__class__`` or ``super``. This allows the zero argument\nform of ``super()`` to correctly identify the class being defined\nbased on lexical scoping, while the class or instance that was used to\nmake the current call is identified based on the first argument passed\nto the method.\n\nAfter the class object is created, it is passed to the class\ndecorators included in the class definition (if any) and the resulting\nobject is bound in the local namespace as the defined class.\n\nSee also:\n\n **PEP 3135** - New super\n Describes the implicit ``__class__`` closure reference\n\n\nMetaclass example\n-----------------\n\nThe potential uses for metaclasses are boundless. 
Some ideas that have\nbeen explored include logging, interface checking, automatic\ndelegation, automatic property creation, proxies, frameworks, and\nautomatic resource locking/synchronization.\n\nHere is an example of a metaclass that uses an\n``collections.OrderedDict`` to remember the order that class members\nwere defined:\n\n class OrderedClass(type):\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwds):\n return collections.OrderedDict()\n\n def __new__(cls, name, bases, namespace, **kwds):\n result = type.__new__(cls, name, bases, dict(namespace))\n result.members = tuple(namespace)\n return result\n\n class A(metaclass=OrderedClass):\n def one(self): pass\n def two(self): pass\n def three(self): pass\n def four(self): pass\n\n >>> A.members\n (\'__module__\', \'one\', \'two\', \'three\', \'four\')\n\nWhen the class definition for *A* gets executed, the process begins\nwith calling the metaclass\'s ``__prepare__()`` method which returns an\nempty ``collections.OrderedDict``. That mapping records the methods\nand attributes of *A* as they are defined within the body of the class\nstatement. Once those definitions are executed, the ordered dictionary\nis fully populated and the metaclass\'s ``__new__()`` method gets\ninvoked. That method builds the new type and it saves the ordered\ndictionary keys in an attribute called ``members``.\n\n\nCustomizing instance and subclass checks\n========================================\n\nThe following methods are used to override the default behavior of the\n``isinstance()`` and ``issubclass()`` built-in functions.\n\nIn particular, the metaclass ``abc.ABCMeta`` implements these methods\nin order to allow the addition of Abstract Base Classes (ABCs) as\n"virtual base classes" to any class or type (including built-in\ntypes), including other ABCs.\n\nclass.__instancecheck__(self, instance)\n\n Return true if *instance* should be considered a (direct or\n indirect) instance of *class*. If defined, called to implement\n ``isinstance(instance, class)``.\n\nclass.__subclasscheck__(self, subclass)\n\n Return true if *subclass* should be considered a (direct or\n indirect) subclass of *class*. If defined, called to implement\n ``issubclass(subclass, class)``.\n\nNote that these methods are looked up on the type (metaclass) of a\nclass. They cannot be defined as class methods in the actual class.\nThis is consistent with the lookup of special methods that are called\non instances, only in this case the instance is itself a class.\n\nSee also:\n\n **PEP 3119** - Introducing Abstract Base Classes\n Includes the specification for customizing ``isinstance()`` and\n ``issubclass()`` behavior through ``__instancecheck__()`` and\n ``__subclasscheck__()``, with motivation for this functionality\n in the context of adding Abstract Base Classes (see the ``abc``\n module) to the language.\n\n\nEmulating callable objects\n==========================\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\n ``x.__call__(arg1, arg2, ...)``.\n\n\nEmulating container types\n=========================\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. 
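The ``__call__()`` hook mentioned above in one short sketch; ``Adder`` is a made-up example:

    class Adder:
        def __init__(self, amount):
            self.amount = amount
        def __call__(self, value):
            # ``add3(10)`` is shorthand for ``add3.__call__(10)``
            return value + self.amount

    >>> add3 = Adder(3)
    >>> add3(10)
    13
    >>> list(map(add3, [1, 2, 3]))
    [4, 5, 6]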
The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\nand ``update()`` behaving similar to those for Python\'s standard\ndictionary objects. The ``collections`` module provides a\n``MutableMapping`` abstract base class to help create those methods\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\nlike Python standard list objects. Finally, sequence types should\nimplement addition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\ndescribed below; they should not define other numerical operators. It\nis recommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should search the mapping\'s keys; for\nsequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``keys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn\'t define a ``__bool__()`` method and whose ``__len__()``\n method returns zero is considered to be false in a Boolean context.\n\nobject.__length_hint__(self)\n\n Called to implement ``operator.length_hint()``. Should return an\n estimated length for the object (which may be greater or less than\n the actual length). The length must be an integer ``>=`` 0. This\n method is purely an optimization and is never required for\n correctness.\n\n New in version 3.4.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with\n ``None``.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. 
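Here is a small read-only sequence that follows the ``__len__()`` and ``__getitem__()`` conventions just described; the class is hypothetical:

    class Squares:
        def __init__(self, n):
            self.n = n
        def __len__(self):
            return self.n
        def __getitem__(self, index):
            if isinstance(index, slice):
                return [self[i] for i in range(*index.indices(self.n))]
            if index < 0:                    # emulate negative indexing ourselves
                index += self.n
            if not 0 <= index < self.n:
                raise IndexError(index)      # lets ``for`` loops detect the end
            return index * index

    >>> s = Squares(5)
    >>> len(s), s[2], s[-1], s[1:3]
    (5, 4, 16, [1, 4])
    >>> 9 in s            # no __contains__(), so __getitem__() iteration is used
    True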
Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``keys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n\n\nEmulating numeric types\n=======================\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``). 
For instance, to evaluate the expression ``x + y``, where\n *x* is an instance of a class that has an ``__add__()`` method,\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\n should not be related to ``__truediv__()``. Note that\n ``__pow__()`` should be defined to accept an optional third\n argument if the ternary version of the built-in ``pow()`` function\n is to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return ``NotImplemented``.\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\n ``|``) with reflected (swapped) operands. These functions are only\n called if the left operand does not support the corresponding\n operation and the operands are of different types. [2] For\n instance, to evaluate the expression ``x - y``, where *y* is an\n instance of a class that has an ``__rsub__()`` method,\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\n *NotImplemented*.\n\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\n (the coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left operand\'s\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand\'s\n non-reflected method. This behavior allows subclasses to\n override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\n should attempt to do the operation in-place (modifying *self*) and\n return the result (which could be, but does not have to be,\n *self*). If a specific method is not defined, the augmented\n assignment falls back to the normal methods. For instance, to\n execute the statement ``x += y``, where *x* is an instance of a\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\n called. 
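A sketch of the ``NotImplemented`` convention and of the augmented-assignment fallback discussed here; ``Money`` is an invented class:

    class Money:
        def __init__(self, cents):
            self.cents = cents
        def __add__(self, other):
            if not isinstance(other, Money):
                return NotImplemented        # give other.__radd__() a chance
            return Money(self.cents + other.cents)
        def __repr__(self):
            return 'Money(%d)' % self.cents

    >>> m = Money(10)
    >>> m += Money(5)       # no __iadd__(), so __add__() is used and m is rebound
    >>> m
    Money(15)
    >>> Money(1) + 1
    Traceback (most recent call last):
      ...
    TypeError: unsupported operand type(s) for +: 'Money' and 'int'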
If *x* is an instance of a class that does not define a\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\n considered, as with the evaluation of ``x + y``.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations (``-``, ``+``,\n ``abs()`` and ``~``).\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions ``complex()``,\n ``int()``, ``float()`` and ``round()``. Should return a value of\n the appropriate type.\n\nobject.__index__(self)\n\n Called to implement ``operator.index()``. Also called whenever\n Python needs an integer object (such as in slicing, or in the\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). Must return\n an integer.\n\n\nWith Statement Context Managers\n===============================\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a ``with`` statement. The context\nmanager handles the entry into, and the exit from, the desired runtime\ncontext for the execution of the block of code. Context managers are\nnormally invoked using the ``with`` statement (described in section\n*The with statement*), but can also be used by directly invoking their\nmethods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The ``with``\n statement will bind this method\'s return value to the target(s)\n specified in the ``as`` clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be ``None``.\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that ``__exit__()`` methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n\n\nSpecial method lookup\n=====================\n\nFor custom classes, implicit invocations of special methods are only\nguaranteed to work correctly if defined on an object\'s type, not in\nthe object\'s instance dictionary. That behaviour is the reason why\nthe following code raises an exception:\n\n >>> class C:\n ... pass\n ...\n >>> c = C()\n >>> c.__len__ = lambda: 5\n >>> len(c)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: object of type \'C\' has no len()\n\nThe rationale behind this behaviour lies with a number of special\nmethods such as ``__hash__()`` and ``__repr__()`` that are implemented\nby all objects, including type objects. 
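And a minimal context manager of the kind described in the ``__enter__()``/``__exit__()`` section above; ``Timer`` is illustrative and assumes ``time.perf_counter()``, which is available since 3.3:

    import time

    class Timer:
        def __enter__(self):
            self.start = time.perf_counter()
            return self                      # bound to the ``as`` target
        def __exit__(self, exc_type, exc_value, traceback):
            self.elapsed = time.perf_counter() - self.start
            return False                     # never suppress exceptions

    >>> with Timer() as t:
    ...     total = sum(range(100000))
    ...
    >>> t.elapsed > 0
    True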
If the implicit lookup of\nthese methods used the conventional lookup process, they would fail\nwhen invoked on the type object itself:\n\n >>> 1 .__hash__() == hash(1)\n True\n >>> int.__hash__() == hash(int)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: descriptor \'__hash__\' of \'int\' object needs an argument\n\nIncorrectly attempting to invoke an unbound method of a class in this\nway is sometimes referred to as \'metaclass confusion\', and is avoided\nby bypassing the instance when looking up special methods:\n\n >>> type(1).__hash__(1) == hash(1)\n True\n >>> type(int).__hash__(int) == hash(int)\n True\n\nIn addition to bypassing any instance attributes in the interest of\ncorrectness, implicit special method lookup generally also bypasses\nthe ``__getattribute__()`` method even of the object\'s metaclass:\n\n >>> class Meta(type):\n ... def __getattribute__(*args):\n ... print("Metaclass getattribute invoked")\n ... return type.__getattribute__(*args)\n ...\n >>> class C(object, metaclass=Meta):\n ... def __len__(self):\n ... return 10\n ... def __getattribute__(*args):\n ... print("Class getattribute invoked")\n ... return object.__getattribute__(*args)\n ...\n >>> c = C()\n >>> c.__len__() # Explicit lookup via instance\n Class getattribute invoked\n 10\n >>> type(c).__len__(c) # Explicit lookup via type\n Metaclass getattribute invoked\n 10\n >>> len(c) # Implicit lookup\n 10\n\nBypassing the ``__getattribute__()`` machinery in this fashion\nprovides significant scope for speed optimisations within the\ninterpreter, at the cost of some flexibility in the handling of\nspecial methods (the special method *must* be set on the class object\nitself in order to be consistently invoked by the interpreter).\n\n-[ Footnotes ]-\n\n[1] It *is* possible in some cases to change an object\'s type, under\n certain controlled conditions. It generally isn\'t a good idea\n though, since it can lead to some very strange behaviour if it is\n handled incorrectly.\n\n[2] For operands of the same type, it is assumed that if the non-\n reflected method (such as ``__add__()``) fails the operation is\n not supported, which is why the reflected method is not called.\n', + 'string-methods': '\nString Methods\n**************\n\nStrings implement all of the *common* sequence operations, along with\nthe additional methods described below.\n\nStrings also support two styles of string formatting, one providing a\nlarge degree of flexibility and customization (see ``str.format()``,\n*Format String Syntax* and *String Formatting*) and the other based on\nC ``printf`` style formatting that handles a narrower range of types\nand is slightly harder to use correctly, but is often faster for the\ncases it can handle (*printf-style String Formatting*).\n\nThe *Text Processing Services* section of the standard library covers\na number of other modules that provide various text related utilities\n(including regular expression support in the ``re`` module).\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\nstr.casefold()\n\n Return a casefolded copy of the string. Casefolded strings may be\n used for caseless matching.\n\n Casefolding is similar to lowercasing but more aggressive because\n it is intended to remove all case distinctions in a string. For\n example, the German lowercase letter ``\'\xc3\x9f\'`` is equivalent to\n ``"ss"``. 
Since it is already lowercase, ``lower()`` would do\n nothing to ``\'\xc3\x9f\'``; ``casefold()`` converts it to ``"ss"``.\n\n The casefolding algorithm is described in section 3.13 of the\n Unicode Standard.\n\n New in version 3.3.\n\nstr.center(width[, fillchar])\n\n Return centered in a string of length *width*. Padding is done\n using the specified *fillchar* (default is a space).\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.encode(encoding="utf-8", errors="strict")\n\n Return an encoded version of the string as a bytes object. Default\n encoding is ``\'utf-8\'``. *errors* may be given to set a different\n error handling scheme. The default for *errors* is ``\'strict\'``,\n meaning that encoding errors raise a ``UnicodeError``. Other\n possible values are ``\'ignore\'``, ``\'replace\'``,\n ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and any other name\n registered via ``codecs.register_error()``, see section *Codec Base\n Classes*. For a list of possible encodings, see section *Standard\n Encodings*.\n\n Changed in version 3.1: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by one or more spaces, depending on the current column and the\n given tab size. Tab positions occur every *tabsize* characters\n (default is 8, giving tab positions at columns 0, 8, 16 and so on).\n To expand the string, the current column is set to zero and the\n string is examined character by character. If the character is a\n tab (``\\t``), one or more space characters are inserted in the\n result until the current column is equal to the next tab position.\n (The tab character itself is not copied.) If the character is a\n newline (``\\n``) or return (``\\r``), it is copied and the current\n column is reset to zero. Any other character is copied unchanged\n and the current column is incremented by one regardless of how the\n character is represented when printed.\n\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs()\n \'01 012 0123 01234\'\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs(4)\n \'01 012 0123 01234\'\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. 
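A small interactive illustration of the two kinds of replacement fields just mentioned, a positional index and a keyword name:

    >>> '{0} is {age} years old'.format('Python', age=22)
    'Python is 22 years old'
    >>> '{1} before {0}'.format('beta', 'alpha')
    'alpha before beta'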
Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\nstr.format_map(mapping)\n\n Similar to ``str.format(**mapping)``, except that ``mapping`` is\n used directly and not copied to a ``dict`` . This is useful if for\n example ``mapping`` is a dict subclass:\n\n >>> class Default(dict):\n ... def __missing__(self, key):\n ... return key\n ...\n >>> \'{name} was born in {country}\'.format_map(Default(name=\'Guido\'))\n \'Guido was born in country\'\n\n New in version 3.2.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise. A character\n ``c`` is alphanumeric if one of the following returns ``True``:\n ``c.isalpha()``, ``c.isdecimal()``, ``c.isdigit()``, or\n ``c.isnumeric()``.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise. Alphabetic\n characters are those characters defined in the Unicode character\n database as "Letter", i.e., those with general category property\n being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is\n different from the "Alphabetic" property defined in the Unicode\n Standard.\n\nstr.isdecimal()\n\n Return true if all characters in the string are decimal characters\n and there is at least one character, false otherwise. Decimal\n characters are those from general category "Nd". This category\n includes digit characters, and all characters that can be used to\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise. Digits include decimal\n characters and digits that need special handling, such as the\n compatibility superscript digits. Formally, a digit is a character\n that has the property value Numeric_Type=Digit or\n Numeric_Type=Decimal.\n\nstr.isidentifier()\n\n Return true if the string is a valid identifier according to the\n language definition, section *Identifiers and keywords*.\n\n Use ``keyword.iskeyword()`` to test for reserved identifiers such\n as ``def`` and ``class``.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\nstr.isnumeric()\n\n Return true if all characters in the string are numeric characters,\n and there is at least one character, false otherwise. Numeric\n characters include digit characters, and all characters that have\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\n ONE FIFTH. Formally, numeric characters are those with the\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\n Numeric_Type=Numeric.\n\nstr.isprintable()\n\n Return true if all characters in the string are printable or the\n string is empty, false otherwise. Nonprintable characters are\n those characters defined in the Unicode character database as\n "Other" or "Separator", excepting the ASCII space (0x20) which is\n considered printable. 
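The differences between these classification methods are easiest to see side by side; a short interactive check:

    >>> '42'.isdecimal(), '42'.isdigit(), '42'.isnumeric()
    (True, True, True)
    >>> '²'.isdecimal(), '²'.isdigit(), '²'.isnumeric()      # superscript two
    (False, True, True)
    >>> 'Ⅷ'.isdecimal(), 'Ⅷ'.isdigit(), 'Ⅷ'.isnumeric()      # Roman numeral eight
    (False, False, True)
    >>> 'two\nlines'.isprintable()                           # \n is not printable
    False
    >>> ''.isprintable()
    True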
(Note that printable characters in this\n context are those which should not be escaped when ``repr()`` is\n invoked on a string. It has no bearing on the handling of strings\n written to ``sys.stdout`` or ``sys.stderr``.)\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise. Whitespace\n characters are those characters defined in the Unicode character\n database as "Other" or "Separator" and those with bidirectional\n property being one of "WS", "B", or "S".\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. A ``TypeError`` will be raised if there are\n any non-string values in *iterable*, including ``bytes`` objects.\n The separator between elements is the string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n The lowercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\nstatic str.maketrans(x[, y[, z]])\n\n This static method returns a translation table usable for\n ``str.translate()``.\n\n If there is only one argument, it must be a dictionary mapping\n Unicode ordinals (integers) or characters (strings of length 1) to\n Unicode ordinals, strings (of arbitrary lengths) or None.\n Character keys will then be converted to ordinals.\n\n If there are two arguments, they must be strings of equal length,\n and in the resulting dictionary, each character in x will be mapped\n to the character at the same position in y. If there is a third\n argument, it must be a string, whose characters will be mapped to\n None in the result.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. 
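A quick example of the basic form, which replaces every occurrence and returns a new string:

    >>> 'spam and spam and spam'.replace('spam', 'eggs')
    'eggs and eggs and eggs'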
If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\nstr.rsplit(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\nstr.split(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). If *maxsplit* is not specified or ``-1``, then there is\n no limit on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. This method uses the *universal newlines* approach to\n splitting lines. 
Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\n For example, ``\'ab c\\n\\nde fg\\rkl\\r\\n\'.splitlines()`` returns\n ``[\'ab c\', \'\', \'de fg\', \'kl\']``, while the same call with\n ``splitlines(True)`` returns ``[\'ab c\\n\', \'\\n\', \'de fg\\r\',\n \'kl\\r\\n\']``.\n\n Unlike ``split()`` when a delimiter string *sep* is given, this\n method returns an empty list for the empty string, and a terminal\n line break does not result in an extra line.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa. Note that it is not necessarily true that\n ``s.swapcase().swapcase() == s``.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\nstr.translate(map)\n\n Return a copy of the *s* where all characters have been mapped\n through the *map* which must be a dictionary of Unicode ordinals\n (integers) to Unicode ordinals, strings or ``None``. Unmapped\n characters are left untouched. Characters mapped to ``None`` are\n deleted.\n\n You can use ``str.maketrans()`` to create a translation map from\n character-to-character mappings in different formats.\n\n Note: An even more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see\n ``encodings.cp1251`` for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n The uppercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. 
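For example, showing the sign handling just mentioned:

    >>> '42'.zfill(5)
    '00042'
    >>> '-42'.zfill(5)
    '-0042'
    >>> '+3.14'.zfill(8)
    '+0003.14'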
The original\n string is returned if *width* is less than or equal to ``len(s)``.\n', 'strings': '\nString and Bytes literals\n*************************\n\nString literals are described by the following lexical definitions:\n\n stringliteral ::= [stringprefix](shortstring | longstring)\n stringprefix ::= "r" | "u" | "R" | "U"\n shortstring ::= "\'" shortstringitem* "\'" | \'"\' shortstringitem* \'"\'\n longstring ::= "\'\'\'" longstringitem* "\'\'\'" | \'"""\' longstringitem* \'"""\'\n shortstringitem ::= shortstringchar | stringescapeseq\n longstringitem ::= longstringchar | stringescapeseq\n shortstringchar ::= \n longstringchar ::= \n stringescapeseq ::= "\\" \n\n bytesliteral ::= bytesprefix(shortbytes | longbytes)\n bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | "rb" | "rB" | "Rb" | "RB"\n shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' shortbytesitem* \'"\'\n longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | \'"""\' longbytesitem* \'"""\'\n shortbytesitem ::= shortbyteschar | bytesescapeseq\n longbytesitem ::= longbyteschar | bytesescapeseq\n shortbyteschar ::= \n longbyteschar ::= \n bytesescapeseq ::= "\\" \n\nOne syntactic restriction not indicated by these productions is that\nwhitespace is not allowed between the ``stringprefix`` or\n``bytesprefix`` and the rest of the literal. The source character set\nis defined by the encoding declaration; it is UTF-8 if no encoding\ndeclaration is given in the source file; see section *Encoding\ndeclarations*.\n\nIn plain English: Both types of literals can be enclosed in matching\nsingle quotes (``\'``) or double quotes (``"``). They can also be\nenclosed in matching groups of three single or double quotes (these\nare generally referred to as *triple-quoted strings*). The backslash\n(``\\``) character is used to escape characters that otherwise have a\nspecial meaning, such as newline, backslash itself, or the quote\ncharacter.\n\nBytes literals are always prefixed with ``\'b\'`` or ``\'B\'``; they\nproduce an instance of the ``bytes`` type instead of the ``str`` type.\nThey may only contain ASCII characters; bytes with a numeric value of\n128 or greater must be expressed with escapes.\n\nAs of Python 3.3 it is possible again to prefix unicode strings with a\n``u`` prefix to simplify maintenance of dual 2.x and 3.x codebases.\n\nBoth string and bytes literals may optionally be prefixed with a\nletter ``\'r\'`` or ``\'R\'``; such strings are called *raw strings* and\ntreat backslashes as literal characters. As a result, in string\nliterals, ``\'\\U\'`` and ``\'\\u\'`` escapes in raw strings are not treated\nspecially. Given that Python 2.x\'s raw unicode literals behave\ndifferently than Python 3.x\'s the ``\'ur\'`` syntax is not supported.\n\n New in version 3.3: The ``\'rb\'`` prefix of raw bytes literals has\n been added as a synonym of ``\'br\'``.\n\n New in version 3.3: Support for the unicode legacy literal\n (``u\'value\'``) was reintroduced to simplify the maintenance of dual\n Python 2.x and 3.x codebases. See **PEP 414** for more information.\n\nIn triple-quoted strings, unescaped newlines and quotes are allowed\n(and are retained), except that three unescaped quotes in a row\nterminate the string. (A "quote" is the character used to open the\nstring, i.e. either ``\'`` or ``"``.)\n\nUnless an ``\'r\'`` or ``\'R\'`` prefix is present, escape sequences in\nstrings are interpreted according to rules similar to those used by\nStandard C. 
The recognized escape sequences are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| ``\\newline`` | Backslash and newline ignored | |\n+-------------------+-----------------------------------+---------+\n| ``\\\\`` | Backslash (``\\``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\\'`` | Single quote (``\'``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\"`` | Double quote (``"``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\a`` | ASCII Bell (BEL) | |\n+-------------------+-----------------------------------+---------+\n| ``\\b`` | ASCII Backspace (BS) | |\n+-------------------+-----------------------------------+---------+\n| ``\\f`` | ASCII Formfeed (FF) | |\n+-------------------+-----------------------------------+---------+\n| ``\\n`` | ASCII Linefeed (LF) | |\n+-------------------+-----------------------------------+---------+\n| ``\\r`` | ASCII Carriage Return (CR) | |\n+-------------------+-----------------------------------+---------+\n| ``\\t`` | ASCII Horizontal Tab (TAB) | |\n+-------------------+-----------------------------------+---------+\n| ``\\v`` | ASCII Vertical Tab (VT) | |\n+-------------------+-----------------------------------+---------+\n| ``\\ooo`` | Character with octal value *ooo* | (1,3) |\n+-------------------+-----------------------------------+---------+\n| ``\\xhh`` | Character with hex value *hh* | (2,3) |\n+-------------------+-----------------------------------+---------+\n\nEscape sequences only recognized in string literals are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| ``\\N{name}`` | Character named *name* in the | (4) |\n| | Unicode database | |\n+-------------------+-----------------------------------+---------+\n| ``\\uxxxx`` | Character with 16-bit hex value | (5) |\n| | *xxxx* | |\n+-------------------+-----------------------------------+---------+\n| ``\\Uxxxxxxxx`` | Character with 32-bit hex value | (6) |\n| | *xxxxxxxx* | |\n+-------------------+-----------------------------------+---------+\n\nNotes:\n\n1. As in Standard C, up to three octal digits are accepted.\n\n2. Unlike in Standard C, exactly two hex digits are required.\n\n3. In a bytes literal, hexadecimal and octal escapes denote the byte\n with the given value. In a string literal, these escapes denote a\n Unicode character with the given value.\n\n4. Changed in version 3.3: Support for name aliases [1] has been\n added.\n\n5. Individual code units which form parts of a surrogate pair can be\n encoded using this escape sequence. Exactly four hex digits are\n required.\n\n6. Any Unicode character can be encoded this way. Exactly eight hex\n digits are required.\n\nUnlike Standard C, all unrecognized escape sequences are left in the\nstring unchanged, i.e., *the backslash is left in the string*. (This\nbehavior is useful when debugging: if an escape sequence is mistyped,\nthe resulting output is more easily recognized as broken.) 
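A few interactive checks of the behaviour described above (a recognized escape, a raw string, and an unrecognized escape left unchanged):

    >>> print('one\ntwo')          # \n is a recognized escape
    one
    two
    >>> print(r'one\ntwo')         # raw string: the backslash stays literal
    one\ntwo
    >>> '\q'                       # unrecognized escape: the backslash is kept
    '\\q'
    >>> '\N{BULLET}'               # recognized in str literals only
    '•'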
It is also\nimportant to note that the escape sequences only recognized in string\nliterals fall into the category of unrecognized escapes for bytes\nliterals.\n\nEven in a raw string, string quotes can be escaped with a backslash,\nbut the backslash remains in the string; for example, ``r"\\""`` is a\nvalid string literal consisting of two characters: a backslash and a\ndouble quote; ``r"\\"`` is not a valid string literal (even a raw\nstring cannot end in an odd number of backslashes). Specifically, *a\nraw string cannot end in a single backslash* (since the backslash\nwould escape the following quote character). Note also that a single\nbackslash followed by a newline is interpreted as those two characters\nas part of the string, *not* as a line continuation.\n', 'subscriptions': '\nSubscriptions\n*************\n\nA subscription selects an item of a sequence (string, tuple or list)\nor mapping (dictionary) object:\n\n subscription ::= primary "[" expression_list "]"\n\nThe primary must evaluate to an object that supports subscription,\ne.g. a list or dictionary. User-defined objects can support\nsubscription by defining a ``__getitem__()`` method.\n\nFor built-in objects, there are two types of objects that support\nsubscription:\n\nIf the primary is a mapping, the expression list must evaluate to an\nobject whose value is one of the keys of the mapping, and the\nsubscription selects the value in the mapping that corresponds to that\nkey. (The expression list is a tuple except if it has exactly one\nitem.)\n\nIf the primary is a sequence, the expression (list) must evaluate to\nan integer or a slice (as discussed in the following section).\n\nThe formal syntax makes no special provision for negative indices in\nsequences; however, built-in sequences all provide a ``__getitem__()``\nmethod that interprets negative indices by adding the length of the\nsequence to the index (so that ``x[-1]`` selects the last item of\n``x``). The resulting value must be a nonnegative integer less than\nthe number of items in the sequence, and the subscription selects the\nitem whose index is that value (counting from zero). Since the support\nfor negative indices and slicing occurs in the object\'s\n``__getitem__()`` method, subclasses overriding this method will need\nto explicitly add that support.\n\nA string\'s items are characters. A character is not a separate data\ntype but a string of exactly one character.\n', 'truth': "\nTruth Value Testing\n*******************\n\nAny object can be tested for truth value, for use in an ``if`` or\n``while`` condition or as operand of the Boolean operations below. The\nfollowing values are considered false:\n\n* ``None``\n\n* ``False``\n\n* zero of any numeric type, for example, ``0``, ``0.0``, ``0j``.\n\n* any empty sequence, for example, ``''``, ``()``, ``[]``.\n\n* any empty mapping, for example, ``{}``.\n\n* instances of user-defined classes, if the class defines a\n ``__bool__()`` or ``__len__()`` method, when that method returns the\n integer zero or ``bool`` value ``False``. [1]\n\nAll other values are considered true --- so objects of many types are\nalways true.\n\nOperations and built-in functions that have a Boolean result always\nreturn ``0`` or ``False`` for false and ``1`` or ``True`` for true,\nunless otherwise stated. 
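As a small sketch (the ``Basket`` class is invented for illustration), an instance is false exactly when its ``__len__()`` returns zero:

    >>> class Basket:               # hypothetical container class
    ...     def __init__(self, items):
    ...         self.items = items
    ...     def __len__(self):
    ...         return len(self.items)
    ...
    >>> bool(Basket([])), bool(Basket(['spam']))
    (False, True)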
(Important exception: the Boolean operations\n``or`` and ``and`` always return one of their operands.)\n", @@ -71,7 +71,7 @@ 'typesmapping': '\nMapping Types --- ``dict``\n**************************\n\nA *mapping* object maps *hashable* values to arbitrary objects.\nMappings are mutable objects. There is currently only one standard\nmapping type, the *dictionary*. (For other containers see the built-\nin ``list``, ``set``, and ``tuple`` classes, and the ``collections``\nmodule.)\n\nA dictionary\'s keys are *almost* arbitrary values. Values that are\nnot *hashable*, that is, values containing lists, dictionaries or\nother mutable types (that are compared by value rather than by object\nidentity) may not be used as keys. Numeric types used for keys obey\nthe normal rules for numeric comparison: if two numbers compare equal\n(such as ``1`` and ``1.0``) then they can be used interchangeably to\nindex the same dictionary entry. (Note however, that since computers\nstore floating-point numbers as approximations it is usually unwise to\nuse them as dictionary keys.)\n\nDictionaries can be created by placing a comma-separated list of\n``key: value`` pairs within braces, for example: ``{\'jack\': 4098,\n\'sjoerd\': 4127}`` or ``{4098: \'jack\', 4127: \'sjoerd\'}``, or by the\n``dict`` constructor.\n\nclass class dict(**kwarg)\nclass class dict(mapping, **kwarg)\nclass class dict(iterable, **kwarg)\n\n Return a new dictionary initialized from an optional positional\n argument and a possibly empty set of keyword arguments.\n\n If no positional argument is given, an empty dictionary is created.\n If a positional argument is given and it is a mapping object, a\n dictionary is created with the same key-value pairs as the mapping\n object. Otherwise, the positional argument must be an *iterator*\n object. Each item in the iterable must itself be an iterator with\n exactly two objects. The first object of each item becomes a key\n in the new dictionary, and the second object the corresponding\n value. If a key occurs more than once, the last value for that key\n becomes the corresponding value in the new dictionary.\n\n If keyword arguments are given, the keyword arguments and their\n values are added to the dictionary created from the positional\n argument. If a key being added is already present, the value from\n the keyword argument replaces the value from the positional\n argument.\n\n To illustrate, the following examples all return a dictionary equal\n to ``{"one": 1, "two": 2, "three": 3}``:\n\n >>> a = dict(one=1, two=2, three=3)\n >>> b = {\'one\': 1, \'two\': 2, \'three\': 3}\n >>> c = dict(zip([\'one\', \'two\', \'three\'], [1, 2, 3]))\n >>> d = dict([(\'two\', 2), (\'one\', 1), (\'three\', 3)])\n >>> e = dict({\'three\': 3, \'one\': 1, \'two\': 2})\n >>> a == b == c == d == e\n True\n\n Providing keyword arguments as in the first example only works for\n keys that are valid Python identifiers. Otherwise, any valid keys\n can be used.\n\n These are the operations that dictionaries support (and therefore,\n custom mapping types should support too):\n\n len(d)\n\n Return the number of items in the dictionary *d*.\n\n d[key]\n\n Return the item of *d* with key *key*. Raises a ``KeyError`` if\n *key* is not in the map.\n\n If a subclass of dict defines a method ``__missing__()``, if the\n key *key* is not present, the ``d[key]`` operation calls that\n method with the key *key* as argument. 
The ``d[key]`` operation\n then returns or raises whatever is returned or raised by the\n ``__missing__(key)`` call if the key is not present. No other\n operations or methods invoke ``__missing__()``. If\n ``__missing__()`` is not defined, ``KeyError`` is raised.\n ``__missing__()`` must be a method; it cannot be an instance\n variable:\n\n >>> class Counter(dict):\n ... def __missing__(self, key):\n ... return 0\n >>> c = Counter()\n >>> c[\'red\']\n 0\n >>> c[\'red\'] += 1\n >>> c[\'red\']\n 1\n\n See ``collections.Counter`` for a complete implementation\n including other methods helpful for accumulating and managing\n tallies.\n\n d[key] = value\n\n Set ``d[key]`` to *value*.\n\n del d[key]\n\n Remove ``d[key]`` from *d*. Raises a ``KeyError`` if *key* is\n not in the map.\n\n key in d\n\n Return ``True`` if *d* has a key *key*, else ``False``.\n\n key not in d\n\n Equivalent to ``not key in d``.\n\n iter(d)\n\n Return an iterator over the keys of the dictionary. This is a\n shortcut for ``iter(d.keys())``.\n\n clear()\n\n Remove all items from the dictionary.\n\n copy()\n\n Return a shallow copy of the dictionary.\n\n classmethod fromkeys(seq[, value])\n\n Create a new dictionary with keys from *seq* and values set to\n *value*.\n\n ``fromkeys()`` is a class method that returns a new dictionary.\n *value* defaults to ``None``.\n\n get(key[, default])\n\n Return the value for *key* if *key* is in the dictionary, else\n *default*. If *default* is not given, it defaults to ``None``,\n so that this method never raises a ``KeyError``.\n\n items()\n\n Return a new view of the dictionary\'s items (``(key, value)``\n pairs). See the *documentation of view objects*.\n\n keys()\n\n Return a new view of the dictionary\'s keys. See the\n *documentation of view objects*.\n\n pop(key[, default])\n\n If *key* is in the dictionary, remove it and return its value,\n else return *default*. If *default* is not given and *key* is\n not in the dictionary, a ``KeyError`` is raised.\n\n popitem()\n\n Remove and return an arbitrary ``(key, value)`` pair from the\n dictionary.\n\n ``popitem()`` is useful to destructively iterate over a\n dictionary, as often used in set algorithms. If the dictionary\n is empty, calling ``popitem()`` raises a ``KeyError``.\n\n setdefault(key[, default])\n\n If *key* is in the dictionary, return its value. If not, insert\n *key* with a value of *default* and return *default*. *default*\n defaults to ``None``.\n\n update([other])\n\n Update the dictionary with the key/value pairs from *other*,\n overwriting existing keys. Return ``None``.\n\n ``update()`` accepts either another dictionary object or an\n iterable of key/value pairs (as tuples or other iterables of\n length two). If keyword arguments are specified, the dictionary\n is then updated with those key/value pairs: ``d.update(red=1,\n blue=2)``.\n\n values()\n\n Return a new view of the dictionary\'s values. See the\n *documentation of view objects*.\n\nSee also:\n\n ``types.MappingProxyType`` can be used to create a read-only view\n of a ``dict``.\n\n\nDictionary view objects\n=======================\n\nThe objects returned by ``dict.keys()``, ``dict.values()`` and\n``dict.items()`` are *view objects*. 
They provide a dynamic view on\nthe dictionary\'s entries, which means that when the dictionary\nchanges, the view reflects these changes.\n\nDictionary views can be iterated over to yield their respective data,\nand support membership tests:\n\nlen(dictview)\n\n Return the number of entries in the dictionary.\n\niter(dictview)\n\n Return an iterator over the keys, values or items (represented as\n tuples of ``(key, value)``) in the dictionary.\n\n Keys and values are iterated over in an arbitrary order which is\n non-random, varies across Python implementations, and depends on\n the dictionary\'s history of insertions and deletions. If keys,\n values and items views are iterated over with no intervening\n modifications to the dictionary, the order of items will directly\n correspond. This allows the creation of ``(value, key)`` pairs\n using ``zip()``: ``pairs = zip(d.values(), d.keys())``. Another\n way to create the same list is ``pairs = [(v, k) for (k, v) in\n d.items()]``.\n\n Iterating views while adding or deleting entries in the dictionary\n may raise a ``RuntimeError`` or fail to iterate over all entries.\n\nx in dictview\n\n Return ``True`` if *x* is in the underlying dictionary\'s keys,\n values or items (in the latter case, *x* should be a ``(key,\n value)`` tuple).\n\nKeys views are set-like since their entries are unique and hashable.\nIf all values are hashable, so that ``(key, value)`` pairs are unique\nand hashable, then the items view is also set-like. (Values views are\nnot treated as set-like since the entries are generally not unique.)\nFor set-like views, all of the operations defined for the abstract\nbase class ``collections.abc.Set`` are available (for example, ``==``,\n``<``, or ``^``).\n\nAn example of dictionary view usage:\n\n >>> dishes = {\'eggs\': 2, \'sausage\': 1, \'bacon\': 1, \'spam\': 500}\n >>> keys = dishes.keys()\n >>> values = dishes.values()\n\n >>> # iteration\n >>> n = 0\n >>> for val in values:\n ... n += val\n >>> print(n)\n 504\n\n >>> # keys and values are iterated over in the same order\n >>> list(keys)\n [\'eggs\', \'bacon\', \'sausage\', \'spam\']\n >>> list(values)\n [2, 1, 1, 500]\n\n >>> # view objects are dynamic and reflect dict changes\n >>> del dishes[\'eggs\']\n >>> del dishes[\'sausage\']\n >>> list(keys)\n [\'spam\', \'bacon\']\n\n >>> # set operations\n >>> keys & {\'eggs\', \'bacon\', \'salad\'}\n {\'bacon\'}\n >>> keys ^ {\'sausage\', \'juice\'}\n {\'juice\', \'sausage\', \'bacon\', \'spam\'}\n', 'typesmethods': '\nMethods\n*******\n\nMethods are functions that are called using the attribute notation.\nThere are two flavors: built-in methods (such as ``append()`` on\nlists) and class instance methods. Built-in methods are described\nwith the types that support them.\n\nIf you access a method (a function defined in a class namespace)\nthrough an instance, you get a special object: a *bound method* (also\ncalled *instance method*) object. When called, it will add the\n``self`` argument to the argument list. Bound methods have two\nspecial read-only attributes: ``m.__self__`` is the object on which\nthe method operates, and ``m.__func__`` is the function implementing\nthe method. Calling ``m(arg-1, arg-2, ..., arg-n)`` is completely\nequivalent to calling ``m.__func__(m.__self__, arg-1, arg-2, ...,\narg-n)``.\n\nLike function objects, bound method objects support getting arbitrary\nattributes. 
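A short demonstration of the equivalence described above (the ``Greeter`` class is made up for the example):

    >>> class Greeter:              # hypothetical example class
    ...     def greet(self, name):
    ...         return 'Hello, ' + name
    ...
    >>> g = Greeter()
    >>> m = g.greet                 # a bound method object
    >>> m.__self__ is g
    True
    >>> m('world') == m.__func__(m.__self__, 'world')
    True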
However, since method attributes are actually stored on\nthe underlying function object (``meth.__func__``), setting method\nattributes on bound methods is disallowed. Attempting to set an\nattribute on a method results in an ``AttributeError`` being raised.\nIn order to set a method attribute, you need to explicitly set it on\nthe underlying function object:\n\n >>> class C:\n ... def method(self):\n ... pass\n ...\n >>> c = C()\n >>> c.method.whoami = \'my name is method\' # can\'t set on the method\n Traceback (most recent call last):\n File "", line 1, in \n AttributeError: \'method\' object has no attribute \'whoami\'\n >>> c.method.__func__.whoami = \'my name is method\'\n >>> c.method.whoami\n \'my name is method\'\n\nSee *The standard type hierarchy* for more information.\n', 'typesmodules': "\nModules\n*******\n\nThe only special operation on a module is attribute access:\n``m.name``, where *m* is a module and *name* accesses a name defined\nin *m*'s symbol table. Module attributes can be assigned to. (Note\nthat the ``import`` statement is not, strictly speaking, an operation\non a module object; ``import foo`` does not require a module object\nnamed *foo* to exist, rather it requires an (external) *definition*\nfor a module named *foo* somewhere.)\n\nA special attribute of every module is ``__dict__``. This is the\ndictionary containing the module's symbol table. Modifying this\ndictionary will actually change the module's symbol table, but direct\nassignment to the ``__dict__`` attribute is not possible (you can\nwrite ``m.__dict__['a'] = 1``, which defines ``m.a`` to be ``1``, but\nyou can't write ``m.__dict__ = {}``). Modifying ``__dict__`` directly\nis not recommended.\n\nModules built into the interpreter are written like this: ````. If loaded from a file, they are written as\n````.\n", - 'typesseq': '\nSequence Types --- ``list``, ``tuple``, ``range``\n*************************************************\n\nThere are three basic sequence types: lists, tuples, and range\nobjects. Additional sequence types tailored for processing of *binary\ndata* and *text strings* are described in dedicated sections.\n\n\nCommon Sequence Operations\n==========================\n\nThe operations in the following table are supported by most sequence\ntypes, both mutable and immutable. The ``collections.abc.Sequence``\nABC is provided to make it easier to correctly implement these\noperations on custom sequence types.\n\nThis table lists the sequence operations sorted in ascending priority\n(operations in the same box have the same priority). In the table,\n*s* and *t* are sequences of the same type, *n*, *i*, *j* and *k* are\nintegers and *x* is an arbitrary object that meets any type and value\nrestrictions imposed by *s*.\n\nThe ``in`` and ``not in`` operations have the same priorities as the\ncomparison operations. 
The ``+`` (concatenation) and ``*``\n(repetition) operations have the same priority as the corresponding\nnumeric operations.\n\n+----------------------------+----------------------------------+------------+\n| Operation | Result | Notes |\n+============================+==================================+============+\n| ``x in s`` | ``True`` if an item of *s* is | (1) |\n| | equal to *x*, else ``False`` | |\n+----------------------------+----------------------------------+------------+\n| ``x not in s`` | ``False`` if an item of *s* is | (1) |\n| | equal to *x*, else ``True`` | |\n+----------------------------+----------------------------------+------------+\n| ``s + t`` | the concatenation of *s* and *t* | (6)(7) |\n+----------------------------+----------------------------------+------------+\n| ``s * n`` or ``n * s`` | *n* shallow copies of *s* | (2)(7) |\n| | concatenated | |\n+----------------------------+----------------------------------+------------+\n| ``s[i]`` | *i*th item of *s*, origin 0 | (3) |\n+----------------------------+----------------------------------+------------+\n| ``s[i:j]`` | slice of *s* from *i* to *j* | (3)(4) |\n+----------------------------+----------------------------------+------------+\n| ``s[i:j:k]`` | slice of *s* from *i* to *j* | (3)(5) |\n| | with step *k* | |\n+----------------------------+----------------------------------+------------+\n| ``len(s)`` | length of *s* | |\n+----------------------------+----------------------------------+------------+\n| ``min(s)`` | smallest item of *s* | |\n+----------------------------+----------------------------------+------------+\n| ``max(s)`` | largest item of *s* | |\n+----------------------------+----------------------------------+------------+\n| ``s.index(x[, i[, j]])`` | index of the first occurence of | (8) |\n| | *x* in *s* (at or after index | |\n| | *i* and before index *j*) | |\n+----------------------------+----------------------------------+------------+\n| ``s.count(x)`` | total number of occurences of | |\n| | *x* in *s* | |\n+----------------------------+----------------------------------+------------+\n\nSequences of the same type also support comparisons. In particular,\ntuples and lists are compared lexicographically by comparing\ncorresponding elements. This means that to compare equal, every\nelement must compare equal and the two sequences must be of the same\ntype and have the same length. (For full details see *Comparisons* in\nthe language reference.)\n\nNotes:\n\n1. While the ``in`` and ``not in`` operations are used only for simple\n containment testing in the general case, some specialised sequences\n (such as ``str``, ``bytes`` and ``bytearray``) also use them for\n subsequence testing:\n\n >>> "gg" in "eggs"\n True\n\n2. Values of *n* less than ``0`` are treated as ``0`` (which yields an\n empty sequence of the same type as *s*). Note also that the copies\n are shallow; nested structures are not copied. This often haunts\n new Python programmers; consider:\n\n >>> lists = [[]] * 3\n >>> lists\n [[], [], []]\n >>> lists[0].append(3)\n >>> lists\n [[3], [3], [3]]\n\n What has happened is that ``[[]]`` is a one-element list containing\n an empty list, so all three elements of ``[[]] * 3`` are (pointers\n to) this single empty list. Modifying any of the elements of\n ``lists`` modifies this single list. 
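The aliasing can be verified directly with the ``is`` operator:

    >>> lists = [[]] * 3
    >>> lists[0] is lists[1] is lists[2]    # one list object, referenced three times
    True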
You can create a list of\n different lists this way:\n\n >>> lists = [[] for i in range(3)]\n >>> lists[0].append(3)\n >>> lists[1].append(5)\n >>> lists[2].append(7)\n >>> lists\n [[3], [5], [7]]\n\n3. If *i* or *j* is negative, the index is relative to the end of the\n string: ``len(s) + i`` or ``len(s) + j`` is substituted. But note\n that ``-0`` is still ``0``.\n\n4. The slice of *s* from *i* to *j* is defined as the sequence of\n items with index *k* such that ``i <= k < j``. If *i* or *j* is\n greater than ``len(s)``, use ``len(s)``. If *i* is omitted or\n ``None``, use ``0``. If *j* is omitted or ``None``, use\n ``len(s)``. If *i* is greater than or equal to *j*, the slice is\n empty.\n\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\n sequence of items with index ``x = i + n*k`` such that ``0 <= n <\n (j-i)/k``. In other words, the indices are ``i``, ``i+k``,\n ``i+2*k``, ``i+3*k`` and so on, stopping when *j* is reached (but\n never including *j*). If *i* or *j* is greater than ``len(s)``,\n use ``len(s)``. If *i* or *j* are omitted or ``None``, they become\n "end" values (which end depends on the sign of *k*). Note, *k*\n cannot be zero. If *k* is ``None``, it is treated like ``1``.\n\n6. Concatenating immutable sequences always results in a new object.\n This means that building up a sequence by repeated concatenation\n will have a quadratic runtime cost in the total sequence length.\n To get a linear runtime cost, you must switch to one of the\n alternatives below:\n\n * if concatenating ``str`` objects, you can build a list and use\n ``str.join()`` at the end or else write to a ``io.StringIO``\n instance and retrieve its value when complete\n\n * if concatenating ``bytes`` objects, you can similarly use\n ``bytes.join()`` or ``io.BytesIO``, or you can do in-place\n concatenation with a ``bytearray`` object. ``bytearray`` objects\n are mutable and have an efficient overallocation mechanism\n\n * if concatenating ``tuple`` objects, extend a ``list`` instead\n\n * for other types, investigate the relevant class documentation\n\n7. Some sequence types (such as ``range``) only support item sequences\n that follow specific patterns, and hence don\'t support sequence\n concatenation or repetition.\n\n8. ``index`` raises ``ValueError`` when *x* is not found in *s*. When\n supported, the additional arguments to the index method allow\n efficient searching of subsections of the sequence. Passing the\n extra arguments is roughly equivalent to using ``s[i:j].index(x)``,\n only without copying any data and with the returned index being\n relative to the start of the sequence rather than the start of the\n slice.\n\n\nImmutable Sequence Types\n========================\n\nThe only operation that immutable sequence types generally implement\nthat is not also implemented by mutable sequence types is support for\nthe ``hash()`` built-in.\n\nThis support allows immutable sequences, such as ``tuple`` instances,\nto be used as ``dict`` keys and stored in ``set`` and ``frozenset``\ninstances.\n\nAttempting to hash an immutable sequence that contains unhashable\nvalues will result in ``TypeError``.\n\n\nMutable Sequence Types\n======================\n\nThe operations in the following table are defined on mutable sequence\ntypes. 
The ``collections.abc.MutableSequence`` ABC is provided to make\nit easier to correctly implement these operations on custom sequence\ntypes.\n\nIn the table *s* is an instance of a mutable sequence type, *t* is any\niterable object and *x* is an arbitrary object that meets any type and\nvalue restrictions imposed by *s* (for example, ``bytearray`` only\naccepts integers that meet the value restriction ``0 <= x <= 255``).\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | appends *x* to the end of the | |\n| | sequence (same as | |\n| | ``s[len(s):len(s)] = [x]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.clear()`` | removes all items from ``s`` | (5) |\n| | (same as ``del s[:]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.copy()`` | creates a shallow copy of ``s`` | (5) |\n| | (same as ``s[:]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(t)`` | extends *s* with the contents of | |\n| | *t* (same as ``s[len(s):len(s)] | |\n| | = t``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | inserts *x* into *s* at the | |\n| | index given by *i* (same as | |\n| | ``s[i:i] = [x]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | retrieves the item at *i* and | (2) |\n| | also removes it from *s* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | remove the first item from *s* | (3) |\n| | where ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (4) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n3. ``remove`` raises ``ValueError`` when *x* is not found in *s*.\n\n4. The ``reverse()`` method modifies the sequence in place for economy\n of space when reversing a large sequence. 
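A brief interactive run through a few of the operations in the table, illustrating notes 1 and 2 and the in-place behaviour of ``reverse()``:

    >>> s = list(range(10))
    >>> s[1:8:2] = ['a', 'b', 'c', 'd']     # note 1: t must match the slice length
    >>> s
    [0, 'a', 2, 'b', 4, 'c', 6, 'd', 8, 9]
    >>> s.pop()                             # note 2: the default index is -1
    9
    >>> s.reverse()                         # in place; returns None
    >>> s
    [8, 'd', 6, 'c', 4, 'b', 2, 'a', 0]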
To remind users that it\n operates by side effect, it does not return the reversed sequence.\n\n5. ``clear()`` and ``copy()`` are included for consistency with the\n interfaces of mutable containers that don\'t support slicing\n operations (such as ``dict`` and ``set``)\n\n New in version 3.3: ``clear()`` and ``copy()`` methods.\n\n\nLists\n=====\n\nLists are mutable sequences, typically used to store collections of\nhomogeneous items (where the precise degree of similarity will vary by\napplication).\n\nclass class list([iterable])\n\n Lists may be constructed in several ways:\n\n * Using a pair of square brackets to denote the empty list: ``[]``\n\n * Using square brackets, separating items with commas: ``[a]``,\n ``[a, b, c]``\n\n * Using a list comprehension: ``[x for x in iterable]``\n\n * Using the type constructor: ``list()`` or ``list(iterable)``\n\n The constructor builds a list whose items are the same and in the\n same order as *iterable*\'s items. *iterable* may be either a\n sequence, a container that supports iteration, or an iterator\n object. If *iterable* is already a list, a copy is made and\n returned, similar to ``iterable[:]``. For example, ``list(\'abc\')``\n returns ``[\'a\', \'b\', \'c\']`` and ``list( (1, 2, 3) )`` returns ``[1,\n 2, 3]``. If no argument is given, the constructor creates a new\n empty list, ``[]``.\n\n Many other operations also produce lists, including the\n ``sorted()`` built-in.\n\n Lists implement all of the *common* and *mutable* sequence\n operations. Lists also provide the following additional method:\n\n sort(*, key=None, reverse=None)\n\n This method sorts the list in place, using only ``<``\n comparisons between items. Exceptions are not suppressed - if\n any comparison operations fail, the entire sort operation will\n fail (and the list will likely be left in a partially modified\n state).\n\n *key* specifies a function of one argument that is used to\n extract a comparison key from each list element (for example,\n ``key=str.lower``). The key corresponding to each item in the\n list is calculated once and then used for the entire sorting\n process. The default value of ``None`` means that list items are\n sorted directly without calculating a separate key value.\n\n The ``functools.cmp_to_key()`` utility is available to convert a\n 2.x style *cmp* function to a *key* function.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n This method modifies the sequence in place for economy of space\n when sorting a large sequence. To remind users that it operates\n by side effect, it does not return the sorted sequence (use\n ``sorted()`` to explicitly request a new sorted list instance).\n\n The ``sort()`` method is guaranteed to be stable. A sort is\n stable if it guarantees not to change the relative order of\n elements that compare equal --- this is helpful for sorting in\n multiple passes (for example, sort by department, then by salary\n grade).\n\n **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python makes the list appear\n empty for the duration, and raises ``ValueError`` if it can\n detect that the list has been mutated during a sort.\n\n\nTuples\n======\n\nTuples are immutable sequences, typically used to store collections of\nheterogeneous data (such as the 2-tuples produced by the\n``enumerate()`` built-in). 
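To illustrate the *key* and *reverse* arguments of ``list.sort()`` described above, using only built-in strings:

    >>> words = ['banana', 'Apple', 'cherry']
    >>> words.sort()                        # plain sort: uppercase sorts first
    >>> words
    ['Apple', 'banana', 'cherry']
    >>> words.sort(key=str.lower, reverse=True)
    >>> words
    ['cherry', 'banana', 'Apple']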
Tuples are also used for cases where an\nimmutable sequence of homogeneous data is needed (such as allowing\nstorage in a ``set`` or ``dict`` instance).\n\nclass class tuple([iterable])\n\n Tuples may be constructed in a number of ways:\n\n * Using a pair of parentheses to denote the empty tuple: ``()``\n\n * Using a trailing comma for a singleton tuple: ``a,`` or ``(a,)``\n\n * Separating items with commas: ``a, b, c`` or ``(a, b, c)``\n\n * Using the ``tuple()`` built-in: ``tuple()`` or\n ``tuple(iterable)``\n\n The constructor builds a tuple whose items are the same and in the\n same order as *iterable*\'s items. *iterable* may be either a\n sequence, a container that supports iteration, or an iterator\n object. If *iterable* is already a tuple, it is returned\n unchanged. For example, ``tuple(\'abc\')`` returns ``(\'a\', \'b\',\n \'c\')`` and ``tuple( [1, 2, 3] )`` returns ``(1, 2, 3)``. If no\n argument is given, the constructor creates a new empty tuple,\n ``()``.\n\n Note that it is actually the comma which makes a tuple, not the\n parentheses. The parentheses are optional, except in the empty\n tuple case, or when they are needed to avoid syntactic ambiguity.\n For example, ``f(a, b, c)`` is a function call with three\n arguments, while ``f((a, b, c))`` is a function call with a 3-tuple\n as the sole argument.\n\n Tuples implement all of the *common* sequence operations.\n\nFor heterogeneous collections of data where access by name is clearer\nthan access by index, ``collections.namedtuple()`` may be a more\nappropriate choice than a simple tuple object.\n\n\nRanges\n======\n\nThe ``range`` type represents an immutable sequence of numbers and is\ncommonly used for looping a specific number of times in ``for`` loops.\n\nclass class range(stop)\nclass class range(start, stop[, step])\n\n The arguments to the range constructor must be integers (either\n built-in ``int`` or any object that implements the ``__index__``\n special method). If the *step* argument is omitted, it defaults to\n ``1``. If the *start* argument is omitted, it defaults to ``0``. If\n *step* is zero, ``ValueError`` is raised.\n\n For a positive *step*, the contents of a range ``r`` are determined\n by the formula ``r[i] = start + step*i`` where ``i >= 0`` and\n ``r[i] < stop``.\n\n For a negative *step*, the contents of the range are still\n determined by the formula ``r[i] = start + step*i``, but the\n constraints are ``i >= 0`` and ``r[i] > stop``.\n\n A range object will be empty if ``r[0]`` does not meet the value\n constraint. 
Ranges do support negative indices, but these are\n interpreted as indexing from the end of the sequence determined by\n the positive indices.\n\n Ranges containing absolute values larger than ``sys.maxsize`` are\n permitted but some features (such as ``len()``) may raise\n ``OverflowError``.\n\n Range examples:\n\n >>> list(range(10))\n [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n >>> list(range(1, 11))\n [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n >>> list(range(0, 30, 5))\n [0, 5, 10, 15, 20, 25]\n >>> list(range(0, 10, 3))\n [0, 3, 6, 9]\n >>> list(range(0, -10, -1))\n [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\n >>> list(range(0))\n []\n >>> list(range(1, 0))\n []\n\n Ranges implement all of the *common* sequence operations except\n concatenation and repetition (due to the fact that range objects\n can only represent sequences that follow a strict pattern and\n repetition and concatenation will usually violate that pattern).\n\nThe advantage of the ``range`` type over a regular ``list`` or\n``tuple`` is that a ``range`` object will always take the same (small)\namount of memory, no matter the size of the range it represents (as it\nonly stores the ``start``, ``stop`` and ``step`` values, calculating\nindividual items and subranges as needed).\n\nRange objects implement the ``collections.Sequence`` ABC, and provide\nfeatures such as containment tests, element index lookup, slicing and\nsupport for negative indices (see *Sequence Types --- list, tuple,\nrange*):\n\n>>> r = range(0, 20, 2)\n>>> r\nrange(0, 20, 2)\n>>> 11 in r\nFalse\n>>> 10 in r\nTrue\n>>> r.index(10)\n5\n>>> r[5]\n10\n>>> r[:5]\nrange(0, 10, 2)\n>>> r[-1]\n18\n\nTesting range objects for equality with ``==`` and ``!=`` compares\nthem as sequences. That is, two range objects are considered equal if\nthey represent the same sequence of values. (Note that two range\nobjects that compare equal might have different ``start``, ``stop``\nand ``step`` attributes, for example ``range(0) == range(2, 1, 3)`` or\n``range(0, 3, 2) == range(0, 4, 2)``.)\n\nChanged in version 3.2: Implement the Sequence ABC. Support slicing\nand negative indices. Test ``int`` objects for membership in constant\ntime instead of iterating through all items.\n\nChanged in version 3.3: Define \'==\' and \'!=\' to compare range objects\nbased on the sequence of values they define (instead of comparing\nbased on object identity).\n\nNew in version 3.3: The ``start``, ``stop`` and ``step`` attributes.\n', + 'typesseq': '\nSequence Types --- ``list``, ``tuple``, ``range``\n*************************************************\n\nThere are three basic sequence types: lists, tuples, and range\nobjects. Additional sequence types tailored for processing of *binary\ndata* and *text strings* are described in dedicated sections.\n\n\nCommon Sequence Operations\n==========================\n\nThe operations in the following table are supported by most sequence\ntypes, both mutable and immutable. The ``collections.abc.Sequence``\nABC is provided to make it easier to correctly implement these\noperations on custom sequence types.\n\nThis table lists the sequence operations sorted in ascending priority\n(operations in the same box have the same priority). In the table,\n*s* and *t* are sequences of the same type, *n*, *i*, *j* and *k* are\nintegers and *x* is an arbitrary object that meets any type and value\nrestrictions imposed by *s*.\n\nThe ``in`` and ``not in`` operations have the same priorities as the\ncomparison operations. 
The ``+`` (concatenation) and ``*``\n(repetition) operations have the same priority as the corresponding\nnumeric operations.\n\n+----------------------------+----------------------------------+------------+\n| Operation | Result | Notes |\n+============================+==================================+============+\n| ``x in s`` | ``True`` if an item of *s* is | (1) |\n| | equal to *x*, else ``False`` | |\n+----------------------------+----------------------------------+------------+\n| ``x not in s`` | ``False`` if an item of *s* is | (1) |\n| | equal to *x*, else ``True`` | |\n+----------------------------+----------------------------------+------------+\n| ``s + t`` | the concatenation of *s* and *t* | (6)(7) |\n+----------------------------+----------------------------------+------------+\n| ``s * n`` or ``n * s`` | *n* shallow copies of *s* | (2)(7) |\n| | concatenated | |\n+----------------------------+----------------------------------+------------+\n| ``s[i]`` | *i*th item of *s*, origin 0 | (3) |\n+----------------------------+----------------------------------+------------+\n| ``s[i:j]`` | slice of *s* from *i* to *j* | (3)(4) |\n+----------------------------+----------------------------------+------------+\n| ``s[i:j:k]`` | slice of *s* from *i* to *j* | (3)(5) |\n| | with step *k* | |\n+----------------------------+----------------------------------+------------+\n| ``len(s)`` | length of *s* | |\n+----------------------------+----------------------------------+------------+\n| ``min(s)`` | smallest item of *s* | |\n+----------------------------+----------------------------------+------------+\n| ``max(s)`` | largest item of *s* | |\n+----------------------------+----------------------------------+------------+\n| ``s.index(x[, i[, j]])`` | index of the first occurrence of | (8) |\n| | *x* in *s* (at or after index | |\n| | *i* and before index *j*) | |\n+----------------------------+----------------------------------+------------+\n| ``s.count(x)`` | total number of occurrences of | |\n| | *x* in *s* | |\n+----------------------------+----------------------------------+------------+\n\nSequences of the same type also support comparisons. In particular,\ntuples and lists are compared lexicographically by comparing\ncorresponding elements. This means that to compare equal, every\nelement must compare equal and the two sequences must be of the same\ntype and have the same length. (For full details see *Comparisons* in\nthe language reference.)\n\nNotes:\n\n1. While the ``in`` and ``not in`` operations are used only for simple\n containment testing in the general case, some specialised sequences\n (such as ``str``, ``bytes`` and ``bytearray``) also use them for\n subsequence testing:\n\n >>> "gg" in "eggs"\n True\n\n2. Values of *n* less than ``0`` are treated as ``0`` (which yields an\n empty sequence of the same type as *s*). Note also that the copies\n are shallow; nested structures are not copied. This often haunts\n new Python programmers; consider:\n\n >>> lists = [[]] * 3\n >>> lists\n [[], [], []]\n >>> lists[0].append(3)\n >>> lists\n [[3], [3], [3]]\n\n What has happened is that ``[[]]`` is a one-element list containing\n an empty list, so all three elements of ``[[]] * 3`` are (pointers\n to) this single empty list. Modifying any of the elements of\n ``lists`` modifies this single list. 
You can create a list of\n different lists this way:\n\n >>> lists = [[] for i in range(3)]\n >>> lists[0].append(3)\n >>> lists[1].append(5)\n >>> lists[2].append(7)\n >>> lists\n [[3], [5], [7]]\n\n3. If *i* or *j* is negative, the index is relative to the end of the\n string: ``len(s) + i`` or ``len(s) + j`` is substituted. But note\n that ``-0`` is still ``0``.\n\n4. The slice of *s* from *i* to *j* is defined as the sequence of\n items with index *k* such that ``i <= k < j``. If *i* or *j* is\n greater than ``len(s)``, use ``len(s)``. If *i* is omitted or\n ``None``, use ``0``. If *j* is omitted or ``None``, use\n ``len(s)``. If *i* is greater than or equal to *j*, the slice is\n empty.\n\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\n sequence of items with index ``x = i + n*k`` such that ``0 <= n <\n (j-i)/k``. In other words, the indices are ``i``, ``i+k``,\n ``i+2*k``, ``i+3*k`` and so on, stopping when *j* is reached (but\n never including *j*). If *i* or *j* is greater than ``len(s)``,\n use ``len(s)``. If *i* or *j* are omitted or ``None``, they become\n "end" values (which end depends on the sign of *k*). Note, *k*\n cannot be zero. If *k* is ``None``, it is treated like ``1``.\n\n6. Concatenating immutable sequences always results in a new object.\n This means that building up a sequence by repeated concatenation\n will have a quadratic runtime cost in the total sequence length.\n To get a linear runtime cost, you must switch to one of the\n alternatives below:\n\n * if concatenating ``str`` objects, you can build a list and use\n ``str.join()`` at the end or else write to a ``io.StringIO``\n instance and retrieve its value when complete\n\n * if concatenating ``bytes`` objects, you can similarly use\n ``bytes.join()`` or ``io.BytesIO``, or you can do in-place\n concatenation with a ``bytearray`` object. ``bytearray`` objects\n are mutable and have an efficient overallocation mechanism\n\n * if concatenating ``tuple`` objects, extend a ``list`` instead\n\n * for other types, investigate the relevant class documentation\n\n7. Some sequence types (such as ``range``) only support item sequences\n that follow specific patterns, and hence don\'t support sequence\n concatenation or repetition.\n\n8. ``index`` raises ``ValueError`` when *x* is not found in *s*. When\n supported, the additional arguments to the index method allow\n efficient searching of subsections of the sequence. Passing the\n extra arguments is roughly equivalent to using ``s[i:j].index(x)``,\n only without copying any data and with the returned index being\n relative to the start of the sequence rather than the start of the\n slice.\n\n\nImmutable Sequence Types\n========================\n\nThe only operation that immutable sequence types generally implement\nthat is not also implemented by mutable sequence types is support for\nthe ``hash()`` built-in.\n\nThis support allows immutable sequences, such as ``tuple`` instances,\nto be used as ``dict`` keys and stored in ``set`` and ``frozenset``\ninstances.\n\nAttempting to hash an immutable sequence that contains unhashable\nvalues will result in ``TypeError``.\n\n\nMutable Sequence Types\n======================\n\nThe operations in the following table are defined on mutable sequence\ntypes. 
The ``collections.abc.MutableSequence`` ABC is provided to make\nit easier to correctly implement these operations on custom sequence\ntypes.\n\nIn the table *s* is an instance of a mutable sequence type, *t* is any\niterable object and *x* is an arbitrary object that meets any type and\nvalue restrictions imposed by *s* (for example, ``bytearray`` only\naccepts integers that meet the value restriction ``0 <= x <= 255``).\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | appends *x* to the end of the | |\n| | sequence (same as | |\n| | ``s[len(s):len(s)] = [x]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.clear()`` | removes all items from ``s`` | (5) |\n| | (same as ``del s[:]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.copy()`` | creates a shallow copy of ``s`` | (5) |\n| | (same as ``s[:]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(t)`` | extends *s* with the contents of | |\n| | *t* (same as ``s[len(s):len(s)] | |\n| | = t``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | inserts *x* into *s* at the | |\n| | index given by *i* (same as | |\n| | ``s[i:i] = [x]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | retrieves the item at *i* and | (2) |\n| | also removes it from *s* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | remove the first item from *s* | (3) |\n| | where ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (4) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n3. ``remove`` raises ``ValueError`` when *x* is not found in *s*.\n\n4. The ``reverse()`` method modifies the sequence in place for economy\n of space when reversing a large sequence. 
To remind users that it\n operates by side effect, it does not return the reversed sequence.\n\n5. ``clear()`` and ``copy()`` are included for consistency with the\n interfaces of mutable containers that don\'t support slicing\n operations (such as ``dict`` and ``set``)\n\n New in version 3.3: ``clear()`` and ``copy()`` methods.\n\n\nLists\n=====\n\nLists are mutable sequences, typically used to store collections of\nhomogeneous items (where the precise degree of similarity will vary by\napplication).\n\nclass class list([iterable])\n\n Lists may be constructed in several ways:\n\n * Using a pair of square brackets to denote the empty list: ``[]``\n\n * Using square brackets, separating items with commas: ``[a]``,\n ``[a, b, c]``\n\n * Using a list comprehension: ``[x for x in iterable]``\n\n * Using the type constructor: ``list()`` or ``list(iterable)``\n\n The constructor builds a list whose items are the same and in the\n same order as *iterable*\'s items. *iterable* may be either a\n sequence, a container that supports iteration, or an iterator\n object. If *iterable* is already a list, a copy is made and\n returned, similar to ``iterable[:]``. For example, ``list(\'abc\')``\n returns ``[\'a\', \'b\', \'c\']`` and ``list( (1, 2, 3) )`` returns ``[1,\n 2, 3]``. If no argument is given, the constructor creates a new\n empty list, ``[]``.\n\n Many other operations also produce lists, including the\n ``sorted()`` built-in.\n\n Lists implement all of the *common* and *mutable* sequence\n operations. Lists also provide the following additional method:\n\n sort(*, key=None, reverse=None)\n\n This method sorts the list in place, using only ``<``\n comparisons between items. Exceptions are not suppressed - if\n any comparison operations fail, the entire sort operation will\n fail (and the list will likely be left in a partially modified\n state).\n\n *key* specifies a function of one argument that is used to\n extract a comparison key from each list element (for example,\n ``key=str.lower``). The key corresponding to each item in the\n list is calculated once and then used for the entire sorting\n process. The default value of ``None`` means that list items are\n sorted directly without calculating a separate key value.\n\n The ``functools.cmp_to_key()`` utility is available to convert a\n 2.x style *cmp* function to a *key* function.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n This method modifies the sequence in place for economy of space\n when sorting a large sequence. To remind users that it operates\n by side effect, it does not return the sorted sequence (use\n ``sorted()`` to explicitly request a new sorted list instance).\n\n The ``sort()`` method is guaranteed to be stable. A sort is\n stable if it guarantees not to change the relative order of\n elements that compare equal --- this is helpful for sorting in\n multiple passes (for example, sort by department, then by salary\n grade).\n\n **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python makes the list appear\n empty for the duration, and raises ``ValueError`` if it can\n detect that the list has been mutated during a sort.\n\n\nTuples\n======\n\nTuples are immutable sequences, typically used to store collections of\nheterogeneous data (such as the 2-tuples produced by the\n``enumerate()`` built-in). 
Tuples are also used for cases where an\nimmutable sequence of homogeneous data is needed (such as allowing\nstorage in a ``set`` or ``dict`` instance).\n\nclass class tuple([iterable])\n\n Tuples may be constructed in a number of ways:\n\n * Using a pair of parentheses to denote the empty tuple: ``()``\n\n * Using a trailing comma for a singleton tuple: ``a,`` or ``(a,)``\n\n * Separating items with commas: ``a, b, c`` or ``(a, b, c)``\n\n * Using the ``tuple()`` built-in: ``tuple()`` or\n ``tuple(iterable)``\n\n The constructor builds a tuple whose items are the same and in the\n same order as *iterable*\'s items. *iterable* may be either a\n sequence, a container that supports iteration, or an iterator\n object. If *iterable* is already a tuple, it is returned\n unchanged. For example, ``tuple(\'abc\')`` returns ``(\'a\', \'b\',\n \'c\')`` and ``tuple( [1, 2, 3] )`` returns ``(1, 2, 3)``. If no\n argument is given, the constructor creates a new empty tuple,\n ``()``.\n\n Note that it is actually the comma which makes a tuple, not the\n parentheses. The parentheses are optional, except in the empty\n tuple case, or when they are needed to avoid syntactic ambiguity.\n For example, ``f(a, b, c)`` is a function call with three\n arguments, while ``f((a, b, c))`` is a function call with a 3-tuple\n as the sole argument.\n\n Tuples implement all of the *common* sequence operations.\n\nFor heterogeneous collections of data where access by name is clearer\nthan access by index, ``collections.namedtuple()`` may be a more\nappropriate choice than a simple tuple object.\n\n\nRanges\n======\n\nThe ``range`` type represents an immutable sequence of numbers and is\ncommonly used for looping a specific number of times in ``for`` loops.\n\nclass class range(stop)\nclass class range(start, stop[, step])\n\n The arguments to the range constructor must be integers (either\n built-in ``int`` or any object that implements the ``__index__``\n special method). If the *step* argument is omitted, it defaults to\n ``1``. If the *start* argument is omitted, it defaults to ``0``. If\n *step* is zero, ``ValueError`` is raised.\n\n For a positive *step*, the contents of a range ``r`` are determined\n by the formula ``r[i] = start + step*i`` where ``i >= 0`` and\n ``r[i] < stop``.\n\n For a negative *step*, the contents of the range are still\n determined by the formula ``r[i] = start + step*i``, but the\n constraints are ``i >= 0`` and ``r[i] > stop``.\n\n A range object will be empty if ``r[0]`` does not meet the value\n constraint. 
Ranges do support negative indices, but these are\n interpreted as indexing from the end of the sequence determined by\n the positive indices.\n\n Ranges containing absolute values larger than ``sys.maxsize`` are\n permitted but some features (such as ``len()``) may raise\n ``OverflowError``.\n\n Range examples:\n\n >>> list(range(10))\n [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n >>> list(range(1, 11))\n [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n >>> list(range(0, 30, 5))\n [0, 5, 10, 15, 20, 25]\n >>> list(range(0, 10, 3))\n [0, 3, 6, 9]\n >>> list(range(0, -10, -1))\n [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\n >>> list(range(0))\n []\n >>> list(range(1, 0))\n []\n\n Ranges implement all of the *common* sequence operations except\n concatenation and repetition (due to the fact that range objects\n can only represent sequences that follow a strict pattern and\n repetition and concatenation will usually violate that pattern).\n\nThe advantage of the ``range`` type over a regular ``list`` or\n``tuple`` is that a ``range`` object will always take the same (small)\namount of memory, no matter the size of the range it represents (as it\nonly stores the ``start``, ``stop`` and ``step`` values, calculating\nindividual items and subranges as needed).\n\nRange objects implement the ``collections.Sequence`` ABC, and provide\nfeatures such as containment tests, element index lookup, slicing and\nsupport for negative indices (see *Sequence Types --- list, tuple,\nrange*):\n\n>>> r = range(0, 20, 2)\n>>> r\nrange(0, 20, 2)\n>>> 11 in r\nFalse\n>>> 10 in r\nTrue\n>>> r.index(10)\n5\n>>> r[5]\n10\n>>> r[:5]\nrange(0, 10, 2)\n>>> r[-1]\n18\n\nTesting range objects for equality with ``==`` and ``!=`` compares\nthem as sequences. That is, two range objects are considered equal if\nthey represent the same sequence of values. (Note that two range\nobjects that compare equal might have different ``start``, ``stop``\nand ``step`` attributes, for example ``range(0) == range(2, 1, 3)`` or\n``range(0, 3, 2) == range(0, 4, 2)``.)\n\nChanged in version 3.2: Implement the Sequence ABC. Support slicing\nand negative indices. Test ``int`` objects for membership in constant\ntime instead of iterating through all items.\n\nChanged in version 3.3: Define \'==\' and \'!=\' to compare range objects\nbased on the sequence of values they define (instead of comparing\nbased on object identity).\n\nNew in version 3.3: The ``start``, ``stop`` and ``step`` attributes.\n', 'typesseq-mutable': "\nMutable Sequence Types\n**********************\n\nThe operations in the following table are defined on mutable sequence\ntypes. 
The ``collections.abc.MutableSequence`` ABC is provided to make\nit easier to correctly implement these operations on custom sequence\ntypes.\n\nIn the table *s* is an instance of a mutable sequence type, *t* is any\niterable object and *x* is an arbitrary object that meets any type and\nvalue restrictions imposed by *s* (for example, ``bytearray`` only\naccepts integers that meet the value restriction ``0 <= x <= 255``).\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | appends *x* to the end of the | |\n| | sequence (same as | |\n| | ``s[len(s):len(s)] = [x]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.clear()`` | removes all items from ``s`` | (5) |\n| | (same as ``del s[:]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.copy()`` | creates a shallow copy of ``s`` | (5) |\n| | (same as ``s[:]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(t)`` | extends *s* with the contents of | |\n| | *t* (same as ``s[len(s):len(s)] | |\n| | = t``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | inserts *x* into *s* at the | |\n| | index given by *i* (same as | |\n| | ``s[i:i] = [x]``) | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | retrieves the item at *i* and | (2) |\n| | also removes it from *s* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | remove the first item from *s* | (3) |\n| | where ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (4) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n3. ``remove`` raises ``ValueError`` when *x* is not found in *s*.\n\n4. The ``reverse()`` method modifies the sequence in place for economy\n of space when reversing a large sequence. 
To remind users that it\n operates by side effect, it does not return the reversed sequence.\n\n5. ``clear()`` and ``copy()`` are included for consistency with the\n interfaces of mutable containers that don't support slicing\n operations (such as ``dict`` and ``set``)\n\n New in version 3.3: ``clear()`` and ``copy()`` methods.\n", 'unary': '\nUnary arithmetic and bitwise operations\n***************************************\n\nAll unary arithmetic and bitwise operations have the same priority:\n\n u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n\nThe unary ``-`` (minus) operator yields the negation of its numeric\nargument.\n\nThe unary ``+`` (plus) operator yields its numeric argument unchanged.\n\nThe unary ``~`` (invert) operator yields the bitwise inversion of its\ninteger argument. The bitwise inversion of ``x`` is defined as\n``-(x+1)``. It only applies to integral numbers.\n\nIn all three cases, if the argument does not have the proper type, a\n``TypeError`` exception is raised.\n', 'while': '\nThe ``while`` statement\n***********************\n\nThe ``while`` statement is used for repeated execution as long as an\nexpression is true:\n\n while_stmt ::= "while" expression ":" suite\n ["else" ":" suite]\n\nThis repeatedly tests the expression and, if it is true, executes the\nfirst suite; if the expression is false (which may be the first time\nit is tested) the suite of the ``else`` clause, if present, is\nexecuted and the loop terminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ngoes back to testing the expression.\n', -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 08:33:17 2013 From: python-checkins at python.org (larry.hastings) Date: Sun, 4 Aug 2013 08:33:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Bumped_version_to_3=2E4=2E?= =?utf-8?q?0a1=2E?= Message-ID: <3c7C3d0G4WzSg2@mail.python.org> http://hg.python.org/cpython/rev/46535f65e7f3 changeset: 85010:46535f65e7f3 tag: v3.4.0a1 user: Larry Hastings date: Sat Aug 03 12:58:12 2013 -0700 summary: Bumped version to 3.4.0a1. files: Include/patchlevel.h | 4 ++-- Lib/distutils/__init__.py | 2 +- Lib/idlelib/idlever.py | 2 +- Misc/NEWS | 2 +- Misc/RPM/python-3.4.spec | 2 +- README | 4 ++-- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Include/patchlevel.h b/Include/patchlevel.h --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 4 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 0 +#define PY_RELEASE_SERIAL 1 /* Version as a string */ -#define PY_VERSION "3.4.0a0" +#define PY_VERSION "3.4.0a1" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py --- a/Lib/distutils/__init__.py +++ b/Lib/distutils/__init__.py @@ -13,5 +13,5 @@ # Updated automatically by the Python release process. # #--start constants-- -__version__ = "3.4.0a0" +__version__ = "3.4.0a1" #--end constants-- diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py --- a/Lib/idlelib/idlever.py +++ b/Lib/idlelib/idlever.py @@ -1,1 +1,1 @@ -IDLE_VERSION = "3.4.0a0" +IDLE_VERSION = "3.4.0a1" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -5,7 +5,7 @@ What's New in Python 3.4.0 Alpha 1? 
=================================== -*Relase date: 2014-XX-XX* +Release date: 2013-08-03 Core and Builtins ----------------- diff --git a/Misc/RPM/python-3.4.spec b/Misc/RPM/python-3.4.spec --- a/Misc/RPM/python-3.4.spec +++ b/Misc/RPM/python-3.4.spec @@ -39,7 +39,7 @@ %define name python #--start constants-- -%define version 3.4.0a0 +%define version 3.4.0a1 %define libvers 3.4 #--end constants-- %define release 1pydotorg diff --git a/README b/README --- a/README +++ b/README @@ -1,5 +1,5 @@ -This is Python version 3.4.0 prerelease -======================================= +This is Python version 3.4.0 alpha 1 +==================================== Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 Python Software Foundation. All rights reserved. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 08:33:18 2013 From: python-checkins at python.org (larry.hastings) Date: Sun, 4 Aug 2013 08:33:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Added_tag_v3=2E4=2E0a1_for?= =?utf-8?q?_changeset_46535f65e7f3?= Message-ID: <3c7C3f2JNDz7LjR@mail.python.org> http://hg.python.org/cpython/rev/966bcde95e4f changeset: 85011:966bcde95e4f user: Larry Hastings date: Sat Aug 03 13:00:18 2013 -0700 summary: Added tag v3.4.0a1 for changeset 46535f65e7f3 files: .hgtags | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -114,3 +114,4 @@ 92c2cfb924055ce68c4f78f836dcfe688437ceb8 v3.3.1rc1 d9893d13c6289aa03d33559ec67f97dcbf5c9e3c v3.3.1 d047928ae3f6314a13b6137051315453d0ae89b6 v3.3.2 +46535f65e7f3bcdcf176f36d34bc1fed719ffd2b v3.4.0a1 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 08:33:20 2013 From: python-checkins at python.org (larry.hastings) Date: Sun, 4 Aug 2013 08:33:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?q?=29=3A_Merge_from_v3=2E4=2E0a1_head=2E?= Message-ID: <3c7C3h0Djxz7Ljg@mail.python.org> http://hg.python.org/cpython/rev/edc668a667ad changeset: 85012:edc668a667ad parent: 85011:966bcde95e4f parent: 84999:e9cecb612ff7 user: Larry Hastings date: Sat Aug 03 13:01:39 2013 -0700 summary: Merge from v3.4.0a1 head. 
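Among the changes this merge pulls in are the int() error-reporting rework from issue #16741 and the regular-expression fix from issue #17998 (both visible in the diff below). A rough, illustrative sketch of the resulting behaviour -- an interpreter session reconstructed from the new tests, not part of the changeset itself:

    >>> int('123\x00', 10)
    Traceback (most recent call last):
      ...
    ValueError: invalid literal for int() with base 10: '123\x00'
    >>> int(b'123\xbd')
    Traceback (most recent call last):
      ...
    ValueError: invalid literal for int() with base 10: b'123\xbd'
    >>> import re
    >>> re.compile('.*?yz', re.S).findall('xyz')
    ['xyz']

Embedded NUL bytes and undecodable bytes now produce the regular "invalid literal" ValueError (previously some of these cases reported a separate "null byte in argument" error), and repeat patterns such as '.*?yz' match as expected.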
files: Include/longobject.h | 1 + Lib/test/test_int.py | 47 ++++++++++----- Lib/test/test_re.py | 10 +++ Misc/NEWS | 7 ++ Modules/_sre.c | 12 ++-- Modules/sre.h | 2 +- Objects/abstract.c | 29 +-------- Objects/longobject.c | 91 +++++++++++++++++++++---------- Tools/msi/msi.py | 5 +- 9 files changed, 126 insertions(+), 78 deletions(-) diff --git a/Include/longobject.h b/Include/longobject.h --- a/Include/longobject.h +++ b/Include/longobject.h @@ -97,6 +97,7 @@ #ifndef Py_LIMITED_API PyAPI_FUNC(PyObject *) PyLong_FromUnicode(Py_UNICODE*, Py_ssize_t, int); PyAPI_FUNC(PyObject *) PyLong_FromUnicodeObject(PyObject *u, int base); +PyAPI_FUNC(PyObject *) _PyLong_FromBytes(const char *, Py_ssize_t, int); #endif #ifndef Py_LIMITED_API diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -73,14 +73,6 @@ x = -1-sys.maxsize self.assertEqual(x >> 1, x//2) - self.assertRaises(ValueError, int, '123\0') - self.assertRaises(ValueError, int, '53', 40) - - # SF bug 1545497: embedded NULs were not detected with - # explicit base - self.assertRaises(ValueError, int, '123\0', 10) - self.assertRaises(ValueError, int, '123\x00 245', 20) - x = int('1' * 600) self.assertIsInstance(x, int) @@ -401,14 +393,37 @@ int(TruncReturnsBadInt()) def test_error_message(self): - testlist = ('\xbd', '123\xbd', ' 123 456 ') - for s in testlist: - try: - int(s) - except ValueError as e: - self.assertIn(s.strip(), e.args[0]) - else: - self.fail("Expected int(%r) to raise a ValueError", s) + def check(s, base=None): + with self.assertRaises(ValueError, + msg="int(%r, %r)" % (s, base)) as cm: + if base is None: + int(s) + else: + int(s, base) + self.assertEqual(cm.exception.args[0], + "invalid literal for int() with base %d: %r" % + (10 if base is None else base, s)) + + check('\xbd') + check('123\xbd') + check(' 123 456 ') + + check('123\x00') + # SF bug 1545497: embedded NULs were not detected with explicit base + check('123\x00', 10) + check('123\x00 245', 20) + check('123\x00 245', 16) + check('123\x00245', 20) + check('123\x00245', 16) + # byte string with embedded NUL + check(b'123\x00') + check(b'123\x00', 10) + # non-UTF-8 byte string + check(b'123\xbd') + check(b'123\xbd', 10) + # lone surrogate in Unicode string + check('123\ud800') + check('123\ud800', 10) def test_main(): support.run_unittest(IntTestCases) diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -1040,6 +1040,16 @@ with self.assertRaisesRegex(sre_constants.error, '\?foo'): re.compile('(?P)') + def test_issue17998(self): + for reps in '*', '+', '?', '{1}': + for mod in '', '?': + pattern = '.' + reps + mod + 'yz' + self.assertEqual(re.compile(pattern, re.S).findall('xyz'), + ['xyz'], msg=pattern) + pattern = pattern.encode() + self.assertEqual(re.compile(pattern, re.S).findall(b'xyz'), + [b'xyz'], msg=pattern) + def run_re_tests(): from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #16741: Fix an error reporting in int(). + - Issue #17899: Fix rare file descriptor leak in os.listdir(). - Issue #9035: ismount now recognises volumes mounted below a drive root @@ -181,6 +183,8 @@ Library ------- +- Issue #17998: Fix an internal error in regular expression engine. + - Issue #17557: Fix os.getgroups() to work with the modified behavior of getgroups(2) on OS X 10.8. Original patch by Mateusz Lenik. 
@@ -761,6 +765,9 @@ Build ----- +- Issue #16067: Add description into MSI file to replace installer's + temporary name. + - Issue #18257: Fix readlink usage in python-config. Install the python version again on Darwin. diff --git a/Modules/_sre.c b/Modules/_sre.c --- a/Modules/_sre.c +++ b/Modules/_sre.c @@ -997,7 +997,7 @@ TRACE(("|%p|%p|REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, ctx->pattern[1], ctx->pattern[2])); - if (ctx->pattern[1] > (end - ctx->ptr) / state->charsize) + if ((Py_ssize_t) ctx->pattern[1] > (end - ctx->ptr) / state->charsize) RETURN_FAILURE; /* cannot match */ state->ptr = ctx->ptr; @@ -1081,7 +1081,7 @@ TRACE(("|%p|%p|MIN_REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, ctx->pattern[1], ctx->pattern[2])); - if (ctx->pattern[1] > (end - ctx->ptr) / state->charsize) + if ((Py_ssize_t) ctx->pattern[1] > (end - ctx->ptr) / state->charsize) RETURN_FAILURE; /* cannot match */ state->ptr = ctx->ptr; @@ -1180,7 +1180,7 @@ TRACE(("|%p|%p|MAX_UNTIL %d\n", ctx->pattern, ctx->ptr, ctx->count)); - if (ctx->count < ctx->u.rep->pattern[1]) { + if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { /* not enough matches */ ctx->u.rep->count = ctx->count; DO_JUMP(JUMP_MAX_UNTIL_1, jump_max_until_1, @@ -1194,7 +1194,7 @@ RETURN_FAILURE; } - if ((ctx->count < ctx->u.rep->pattern[2] || + if ((ctx->count < (Py_ssize_t) ctx->u.rep->pattern[2] || ctx->u.rep->pattern[2] == SRE_MAXREPEAT) && state->ptr != ctx->u.rep->last_ptr) { /* we may have enough matches, but if we can @@ -1243,7 +1243,7 @@ TRACE(("|%p|%p|MIN_UNTIL %d %p\n", ctx->pattern, ctx->ptr, ctx->count, ctx->u.rep->pattern)); - if (ctx->count < ctx->u.rep->pattern[1]) { + if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { /* not enough matches */ ctx->u.rep->count = ctx->count; DO_JUMP(JUMP_MIN_UNTIL_1, jump_min_until_1, @@ -1272,7 +1272,7 @@ LASTMARK_RESTORE(); - if ((ctx->count >= ctx->u.rep->pattern[2] + if ((ctx->count >= (Py_ssize_t) ctx->u.rep->pattern[2] && ctx->u.rep->pattern[2] != SRE_MAXREPEAT) || state->ptr == ctx->u.rep->last_ptr) RETURN_FAILURE; diff --git a/Modules/sre.h b/Modules/sre.h --- a/Modules/sre.h +++ b/Modules/sre.h @@ -19,7 +19,7 @@ #if SIZEOF_SIZE_T > 4 # define SRE_MAXREPEAT (~(SRE_CODE)0) #else -# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) +# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX) #endif typedef struct { diff --git a/Objects/abstract.c b/Objects/abstract.c --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -1261,25 +1261,6 @@ } -/* Add a check for embedded NULL-bytes in the argument. */ -static PyObject * -long_from_string(const char *s, Py_ssize_t len) -{ - char *end; - PyObject *x; - - x = PyLong_FromString((char*)s, &end, 10); - if (x == NULL) - return NULL; - if (end != s + len) { - PyErr_SetString(PyExc_ValueError, - "null byte in argument for int()"); - Py_DECREF(x); - return NULL; - } - return x; -} - PyObject * PyNumber_Long(PyObject *o) { @@ -1327,16 +1308,16 @@ if (PyBytes_Check(o)) /* need to do extra error checking that PyLong_FromString() - * doesn't do. In particular int('9.5') must raise an - * exception, not truncate the float. + * doesn't do. In particular int('9\x005') must raise an + * exception, not truncate at the null. */ - return long_from_string(PyBytes_AS_STRING(o), - PyBytes_GET_SIZE(o)); + return _PyLong_FromBytes(PyBytes_AS_STRING(o), + PyBytes_GET_SIZE(o), 10); if (PyUnicode_Check(o)) /* The above check is done in PyLong_FromUnicode(). 
*/ return PyLong_FromUnicodeObject(o, 10); if (!PyObject_AsCharBuffer(o, &buffer, &buffer_len)) - return long_from_string(buffer, buffer_len); + return _PyLong_FromBytes(buffer, buffer_len, 10); return type_error("int() argument must be a string or a " "number, not '%.200s'", o); diff --git a/Objects/longobject.c b/Objects/longobject.c --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -2000,6 +2000,14 @@ return long_normalize(z); } +/* Parses a long from a bytestring. Leading and trailing whitespace will be + * ignored. + * + * If successful, a PyLong object will be returned and 'pend' will be pointing + * to the first unused byte unless it's NULL. + * + * If unsuccessful, NULL will be returned. + */ PyObject * PyLong_FromString(char *str, char **pend, int base) { @@ -2262,24 +2270,54 @@ str++; if (*str != '\0') goto onError; - if (pend) + long_normalize(z); + z = maybe_small_long(z); + if (z == NULL) + return NULL; + if (pend != NULL) *pend = str; - long_normalize(z); - return (PyObject *) maybe_small_long(z); + return (PyObject *) z; onError: + if (pend != NULL) + *pend = str; Py_XDECREF(z); slen = strlen(orig_str) < 200 ? strlen(orig_str) : 200; strobj = PyUnicode_FromStringAndSize(orig_str, slen); if (strobj == NULL) return NULL; PyErr_Format(PyExc_ValueError, - "invalid literal for int() with base %d: %R", + "invalid literal for int() with base %d: %.200R", base, strobj); Py_DECREF(strobj); return NULL; } +/* Since PyLong_FromString doesn't have a length parameter, + * check here for possible NULs in the string. + * + * Reports an invalid literal as a bytes object. + */ +PyObject * +_PyLong_FromBytes(const char *s, Py_ssize_t len, int base) +{ + PyObject *result, *strobj; + char *end = NULL; + + result = PyLong_FromString((char*)s, &end, base); + if (end == NULL || (result != NULL && end == s + len)) + return result; + Py_XDECREF(result); + strobj = PyBytes_FromStringAndSize(s, Py_MIN(len, 200)); + if (strobj != NULL) { + PyErr_Format(PyExc_ValueError, + "invalid literal for int() with base %d: %.200R", + base, strobj); + Py_DECREF(strobj); + } + return NULL; +} + PyObject * PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base) { @@ -2294,9 +2332,8 @@ PyObject * PyLong_FromUnicodeObject(PyObject *u, int base) { - PyObject *result; - PyObject *asciidig; - char *buffer, *end; + PyObject *result, *asciidig; + char *buffer, *end = NULL; Py_ssize_t buflen; asciidig = _PyUnicode_TransformDecimalAndSpaceToASCII(u); @@ -2305,17 +2342,22 @@ buffer = PyUnicode_AsUTF8AndSize(asciidig, &buflen); if (buffer == NULL) { Py_DECREF(asciidig); - return NULL; - } - result = PyLong_FromString(buffer, &end, base); - if (result != NULL && end != buffer + buflen) { - PyErr_SetString(PyExc_ValueError, - "null byte in argument for int()"); - Py_DECREF(result); - result = NULL; - } - Py_DECREF(asciidig); - return result; + if (!PyErr_ExceptionMatches(PyExc_UnicodeEncodeError)) + return NULL; + } + else { + result = PyLong_FromString(buffer, &end, base); + if (end == NULL || (result != NULL && end == buffer + buflen)) { + Py_DECREF(asciidig); + return result; + } + Py_DECREF(asciidig); + Py_XDECREF(result); + } + PyErr_Format(PyExc_ValueError, + "invalid literal for int() with base %d: %.200R", + base, u); + return NULL; } /* forward */ @@ -4319,23 +4361,12 @@ if (PyUnicode_Check(x)) return PyLong_FromUnicodeObject(x, (int)base); else if (PyByteArray_Check(x) || PyBytes_Check(x)) { - /* Since PyLong_FromString doesn't have a length parameter, - * check here for possible NULs in the string. 
*/ char *string; - Py_ssize_t size = Py_SIZE(x); if (PyByteArray_Check(x)) string = PyByteArray_AS_STRING(x); else string = PyBytes_AS_STRING(x); - if (strlen(string) != (size_t)size || !size) { - /* We only see this if there's a null byte in x or x is empty, - x is a bytes or buffer, *and* a base is given. */ - PyErr_Format(PyExc_ValueError, - "invalid literal for int() with base %d: %R", - (int)base, x); - return NULL; - } - return PyLong_FromString(string, NULL, (int)base); + return _PyLong_FromBytes(string, Py_SIZE(x), (int)base); } else { PyErr_SetString(PyExc_TypeError, diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -1415,7 +1415,10 @@ # certname (from config.py) should be (a substring of) # the certificate subject, e.g. "Python Software Foundation" if certname: - os.system('signtool sign /n "%s" /t http://timestamp.verisign.com/scripts/timestamp.dll %s' % (certname, msiname)) + os.system('signtool sign /n "%s" ' + '/t http://timestamp.verisign.com/scripts/timestamp.dll ' + '/d "Python %s" ' + '%s' % (certname, full_current_version, msiname)) if pdbzip: build_pdbzip() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 08:33:21 2013 From: python-checkins at python.org (larry.hastings) Date: Sun, 4 Aug 2013 08:33:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Cycled_Misc/NEWS_for_alpha?= =?utf-8?q?_2=2C_touched_patchlevel=2E?= Message-ID: <3c7C3j35Bpz7Ljd@mail.python.org> http://hg.python.org/cpython/rev/f434c39a3434 changeset: 85013:f434c39a3434 user: Larry Hastings date: Sat Aug 03 23:29:24 2013 -0700 summary: Cycled Misc/NEWS for alpha 2, touched patchlevel. files: Include/patchlevel.h | 2 +- Misc/NEWS | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletions(-) diff --git a/Include/patchlevel.h b/Include/patchlevel.h --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -23,7 +23,7 @@ #define PY_RELEASE_SERIAL 1 /* Version as a string */ -#define PY_VERSION "3.4.0a1" +#define PY_VERSION "3.4.0a1+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -2,6 +2,18 @@ Python News +++++++++++ +What's New in Python 3.4.0 Alpha 2? +=================================== + +Projected Release date: 2013-09-08 + +Core and Builtins +----------------- + +Library +------- + + What's New in Python 3.4.0 Alpha 1? =================================== -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 08:33:22 2013 From: python-checkins at python.org (larry.hastings) Date: Sun, 4 Aug 2013 08:33:22 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?q?=29=3A_Merging_the_3=2E4=2E0a1_head=2E?= Message-ID: <3c7C3k6PxLz7Lk3@mail.python.org> http://hg.python.org/cpython/rev/77558617b2ba changeset: 85014:77558617b2ba parent: 85013:f434c39a3434 parent: 85008:96f45011957e user: Larry Hastings date: Sat Aug 03 23:30:13 2013 -0700 summary: Merging the 3.4.0a1 head. 
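One of the fixes carried along in the merge below is the ElementPath change that keys the compiled-selector cache on the namespace mapping as well as on the path string. A minimal sketch of what that affects -- the XML document and prefixes here are invented for illustration, not taken from the test suite:

    >>> import xml.etree.ElementTree as ET
    >>> root = ET.XML('<r xmlns:p="X" xmlns:q="Y"><p:b/><p:b/><q:b/><b/></r>')
    >>> len(root.findall('.//xx:b', namespaces={'xx': 'X'}))
    2
    >>> len(root.findall('.//xx:b', namespaces={'xx': 'Y'}))
    1

With the old path-only cache, the second findall() call could silently reuse the selector compiled for the first namespace map and report 2 again; keying the cache on (path, namespaces) avoids that.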
files: Doc/library/xml.etree.elementtree.rst | 5 +++-- Lib/sre_compile.py | 4 ++-- Lib/test/test_unicode.py | 2 -- Lib/test/test_xml_etree.py | 14 ++++++++++++++ Lib/xml/etree/ElementPath.py | 6 ++++-- 5 files changed, 23 insertions(+), 8 deletions(-) diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -416,7 +416,8 @@ and ``"end-ns"`` (the "ns" events are used to get detailed namespace information). If *events* is omitted, only ``"end"`` events are reported. *parser* is an optional parser instance. If not given, the standard - :class:`XMLParser` parser is used. Returns an :term:`iterator` providing + :class:`XMLParser` parser is used. *parser* can only use the default + :class:`TreeBuilder` as a target. Returns an :term:`iterator` providing ``(event, elem)`` pairs. Note that while :func:`iterparse` builds the tree incrementally, it issues @@ -880,7 +881,7 @@ events are used to get detailed namespace information). If *events* is omitted, only ``"end"`` events are reported. *parser* is an optional parser instance. If not given, the standard :class:`XMLParser` parser is - used. + used. *parser* can only use the default :class:`TreeBuilder` as a target. .. method:: data_received(data) diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py --- a/Lib/sre_compile.py +++ b/Lib/sre_compile.py @@ -351,8 +351,8 @@ def _simple(av): # check if av is a "simple" operator lo, hi = av[2].getwidth() - if lo == 0 and hi == MAXREPEAT: - raise error("nothing to repeat") + #if lo == 0 and hi == MAXREPEAT: + # raise error("nothing to repeat") return lo == hi == 1 and av[2][0][0] != SUBPATTERN def _compile_info(code, pattern, flags): diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -1735,8 +1735,6 @@ self.assertRaises(TypeError, "hello".encode, 42, 42, 42) # Error handling (lone surrogate in PyUnicode_TransformDecimalToASCII()) - self.assertRaises(UnicodeError, int, "\ud800") - self.assertRaises(UnicodeError, int, "\udf00") self.assertRaises(UnicodeError, float, "\ud800") self.assertRaises(UnicodeError, float, "\udf00") self.assertRaises(UnicodeError, complex, "\ud800") diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -1839,6 +1839,20 @@ summarize_list(e.findall(".//{http://effbot.org/ns}tag")), ['{http://effbot.org/ns}tag'] * 3) + def test_findall_different_nsmaps(self): + root = ET.XML(''' + + + + + ''') + nsmap = {'xx': 'X'} + self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 2) + self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 2) + nsmap = {'xx': 'Y'} + self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 1) + self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 2) + def test_bad_find(self): e = ET.XML(SAMPLE_XML) with self.assertRaisesRegex(SyntaxError, 'cannot use absolute path'): diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py --- a/Lib/xml/etree/ElementPath.py +++ b/Lib/xml/etree/ElementPath.py @@ -249,10 +249,12 @@ def iterfind(elem, path, namespaces=None): # compile selector pattern + cache_key = (path, None if namespaces is None + else tuple(sorted(namespaces.items()))) if path[-1:] == "/": path = path + "*" # implicit all (FIXME: keep this?) 
try: - selector = _cache[path] + selector = _cache[cache_key] except KeyError: if len(_cache) > 100: _cache.clear() @@ -272,7 +274,7 @@ token = next() except StopIteration: break - _cache[path] = selector + _cache[cache_key] = selector # execute selector pattern result = [elem] context = _SelectorContext(elem) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 10:31:08 2013 From: python-checkins at python.org (victor.stinner) Date: Sun, 4 Aug 2013 10:31:08 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Complete_What=27s_New_in_P?= =?utf-8?q?ython_3=2E4?= Message-ID: <3c7Fgc4v68zPsH@mail.python.org> http://hg.python.org/cpython/rev/b3ff35b53c4c changeset: 85015:b3ff35b53c4c user: Victor Stinner date: Sun Aug 04 10:30:57 2013 +0200 summary: Complete What's New in Python 3.4 files: Doc/whatsnew/3.4.rst | 9 +++++++-- 1 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -245,10 +245,13 @@ stat ---- -The stat module is now backed by a C implementation in :mod:`_stat`. A C +The :mod:`stat` module is now backed by a C implementation in :mod:`_stat`. A C implementation is required as most of the values aren't standardized and platform-dependent. (Contributed by Christian Heimes in :issue:`11016`.) +The module supports new file types: door, event port and whiteout. + + Optimizations ============= @@ -271,7 +274,9 @@ Unsupported Operating Systems ----------------------------- -* None yet. +* OS/2 +* VMS (issue 16136) +* Windows 2000 Deprecated Python modules, functions and methods -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 10:32:37 2013 From: python-checkins at python.org (victor.stinner) Date: Sun, 4 Aug 2013 10:32:37 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Hum=2C_VMS_code_is_still_p?= =?utf-8?q?resent?= Message-ID: <3c7FjK0YyFzSZC@mail.python.org> http://hg.python.org/cpython/rev/c3e6df110737 changeset: 85016:c3e6df110737 user: Victor Stinner date: Sun Aug 04 10:32:26 2013 +0200 summary: Hum, VMS code is still present files: Doc/whatsnew/3.4.rst | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -275,7 +275,6 @@ ----------------------------- * OS/2 -* VMS (issue 16136) * Windows 2000 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 10:48:57 2013 From: python-checkins at python.org (larry.hastings) Date: Sun, 4 Aug 2013 10:48:57 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Updated_release_schedule=2C_a?= =?utf-8?q?dded_some_notes_for_PEP_101_=28Doing_Python_releases=29=2E?= Message-ID: <3c7G496dTCzSjR@mail.python.org> http://hg.python.org/peps/rev/1511a7c260dc changeset: 5031:1511a7c260dc parent: 4915:2f9702702b7c user: Larry Hastings date: Sat Aug 03 23:52:51 2013 -0700 summary: Updated release schedule, added some notes for PEP 101 (Doing Python releases). files: pep-0101.txt | 27 ++++++++++++++++++++++- pep-0429.txt | 47 ++++++++++++++++++++++++--------------- 2 files changed, 55 insertions(+), 19 deletions(-) diff --git a/pep-0101.txt b/pep-0101.txt --- a/pep-0101.txt +++ b/pep-0101.txt @@ -24,6 +24,31 @@ is organized as a recipe and you can actually print this out and check items off as you complete them. 
+Things You'll Need + + As a release manager there are a lot of resources you'll need to access. + Here's a hopefully-complete list. + + * A GPG key. + + Python releases are digitally signed with GPG; you'll need a key, + which hopefully will be on the "web of trust" with at least one of + the other release managers. + + * Access to ``dinsdale.python.org``, the Python web host. You'll + be uploading files directly here. + + * Write access to the website SVN repository + + Instructions here: + + http://www.python.org/dev/pydotorg/website/ + + * Write access to the PEP repository. + + If you're reading this, you probably already have this--the first + task of any release manager is to draft the release schedule. But + in case you just signed up... sucker! I mean, uh, congratulations! How to Make A Release @@ -140,7 +165,7 @@ ___ Regenerate Lib/pydoc-topics.py. cd to the Doc directory and type ``make pydoc-topics``. Then copy - ``build/pydoc-topics/pydoc-topics.py`` to ``../Lib/pydoc_topics.py``. + ``build/pydoc-topics/topics.py`` to ``../Lib/pydoc_data/topics.py``. ___ Check the docs for markup errors. diff --git a/pep-0429.txt b/pep-0429.txt --- a/pep-0429.txt +++ b/pep-0429.txt @@ -34,26 +34,32 @@ Release Schedule ================ -The anticipated schedule: +The releases so far: - 3.4.0 alpha 1: August 3, 2013 -- 3.4.0 alpha 2: August 31, 2013 -- 3.4.0 alpha 3: September 28, 2013 -- 3.4.0 alpha 4: October 19, 2013 -- 3.4.0 beta 1: November 23, 2013 + +The anticipated schedule for future releases: + +- 3.4.0 alpha 2: September 8, 2013 +- 3.4.0 alpha 3: September 29, 2013 +- 3.4.0 alpha 4: October 20, 2013 +- 3.4.0 beta 1: November 24, 2013 (Beta 1 is also "feature freeze"--no new features beyond this point.) -- 3.4.0 beta 2: January 4, 2014 -- 3.4.0 candidate 1: January 18, 2014 -- 3.4.0 candidate 2: February 1, 2014 -- 3.4.0 final: February 22, 2014 +- 3.4.0 beta 2: January 5, 2014 +- 3.4.0 candidate 1: January 19, 2014 +- 3.4.0 candidate 2: February 2, 2014 +- 3.4.0 final: February 23, 2014 .. don't forget to update final date above as well -Every release day listed here is the prospective day of tagging the release; -the actual availability of the release for download on python.org depends -on the schedules of the crew. +These are the days we expect to release the software; +Python core developers should note that the revision +used for the release will generally be "tagged" the day +before. However the actual availability of the release +for download on python.org depends on the schedules of +the crew and the existence of any release-blocking issues. 
Features for 3.4 @@ -61,7 +67,10 @@ Implemented / Final PEPs: -* None so far +* PEP 435, a standardized "enum" module +* PEP 442, improved semantics for object finalization +* PEP 443, adding single-dispatch generic functions to the standard library +* PEP 445, a new C API for implementing custom memory allocators Other final large-scale changes: @@ -69,17 +78,19 @@ Candidate PEPs: -* PEP 395: Qualified Names for Modules -* PEP 3143: Standard daemon process library -* PEP 3154: Pickle protocol version 4 +* PEP 431, improved support for time zone databases +* PEP 436, a build-time preprocessor for builtin argument parsing +* PEP 446, explicit controls on file descriptor inheritance +* PEP 447, support for __locallookup__ metaclass method +* PEP 448, additional unpacking generalizations +* PEP 3154, Pickle protocol revision 4 Other proposed large-scale changes: -* New packaging libraries, possibly deprecating "distutils" +* Introspection information for builtins * Addition of the "regex" module * Email version 6 * A standard event-loop interface -* Breaking out standard library and docs into separate repos Deferred to post-3.4: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sun Aug 4 10:49:00 2013 From: python-checkins at python.org (larry.hastings) Date: Sun, 4 Aug 2013 10:49:00 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps_=28merge_default_-=3E_default=29?= =?utf-8?q?=3A_Merge=2E?= Message-ID: <3c7G4D4F3lz7Ljk@mail.python.org> http://hg.python.org/peps/rev/4e16704af966 changeset: 5032:4e16704af966 parent: 5031:1511a7c260dc parent: 5030:e4e32122be07 user: Larry Hastings date: Sun Aug 04 01:46:12 2013 -0700 summary: Merge. files: pep-0008.txt | 299 ++- pep-0315.txt | 23 +- pep-0394.txt | 55 +- pep-0413.txt | 2 +- pep-0426.txt | 2568 ++++++++++++++-------- pep-0426/pydist-schema.json | 329 ++ pep-0432.txt | 29 +- pep-0435.txt | 6 +- pep-0439.txt | 166 +- pep-0440.txt | 361 +- pep-0442.txt | 7 +- pep-0443.txt | 112 +- pep-0445.txt | 773 ++++++ pep-0446.txt | 248 ++ pep-0447.txt | 408 +++ pep-0448.txt | 247 ++ pep-0466/test_cloexec.py | 50 + pep-3150.txt | 120 +- pep-3156.txt | 30 +- pep2html.py | 2 +- 20 files changed, 4480 insertions(+), 1355 deletions(-) diff --git a/pep-0008.txt b/pep-0008.txt --- a/pep-0008.txt +++ b/pep-0008.txt @@ -3,12 +3,13 @@ Version: $Revision$ Last-Modified: $Date$ Author: Guido van Rossum , - Barry Warsaw + Barry Warsaw , + Nick Coghlan Status: Active Type: Process Content-Type: text/x-rst Created: 05-Jul-2001 -Post-History: 05-Jul-2001 +Post-History: 05-Jul-2001, 01-Aug-2013 Introduction @@ -23,6 +24,13 @@ Guido's original Python Style Guide essay, with some additions from Barry's style guide [2]_. +This style guide evolves over time as additional conventions are +identified and past conventions are rendered obsolete by changes in +the language itself. + +Many projects have their own coding style guidelines. In the event of any +conflicts, such project-specific guides take precedence for that project. + A Foolish Consistency is the Hobgoblin of Little Minds ====================================================== @@ -41,15 +49,24 @@ judgment. Look at other examples and decide what looks best. And don't hesitate to ask! -Two good reasons to break a particular rule: +In particular: do not break backwards compatibility just to comply with +this PEP! -1. When applying the rule would make the code less readable, even for - someone who is used to reading code that follows the rules. 
+Some other good reasons to ignore a particular guideline: + +1. When applying the guideline would make the code less readable, even + for someone who is used to reading code that follows this PEP. 2. To be consistent with surrounding code that also breaks it (maybe for historic reasons) -- although this is also an opportunity to clean up someone else's mess (in true XP style). +3. Because the code in question predates the introduction of the + guideline and there is no other reason to be modifying that code. + +4. When the code needs to remain compatible with older versions of + Python that don't support the feature recommended by the style guide. + Code lay-out ============ @@ -59,9 +76,6 @@ Use 4 spaces per indentation level. -For really old code that you don't want to mess up, you can continue -to use 8-space tabs. - Continuation lines should align wrapped elements either vertically using Python's implicit line joining inside parentheses, brackets and braces, or using a hanging indent. When using a hanging indent the @@ -101,7 +115,8 @@ var_three, var_four) The closing brace/bracket/parenthesis on multi-line constructs may -either line up under the last item of the list, as in:: +either line up under the first non-whitespace character of the last +line of list, as in:: my_list = [ 1, 2, 3, @@ -128,47 +143,78 @@ Tabs or Spaces? --------------- -Never mix tabs and spaces. +Spaces are the preferred indentation method. -The most popular way of indenting Python is with spaces only. The -second-most popular way is with tabs only. Code indented with a -mixture of tabs and spaces should be converted to using spaces -exclusively. When invoking the Python command line interpreter with +Tabs should be used solely to remain consistent with code that is +already indented with tabs. + +Python 3 disallows mixing the use of tabs and spaces for indentation. + +Python 2 code indented with a mixture of tabs and spaces should be +converted to using spaces exclusively. + +When invoking the Python 2 command line interpreter with the ``-t`` option, it issues warnings about code that illegally mixes tabs and spaces. When using ``-tt`` these warnings become errors. These options are highly recommended! -For new projects, spaces-only are strongly recommended over tabs. -Most editors have features that make this easy to do. Maximum Line Length ------------------- Limit all lines to a maximum of 79 characters. -There are still many devices around that are limited to 80 character -lines; plus, limiting windows to 80 characters makes it possible to -have several windows side-by-side. The default wrapping on such -devices disrupts the visual structure of the code, making it more -difficult to understand. Therefore, please limit all lines to a -maximum of 79 characters. For flowing long blocks of text (docstrings -or comments), limiting the length to 72 characters is recommended. +For flowing long blocks of text with fewer structural restrictions +(docstrings or comments), the line length should be limited to 72 +characters. + +Limiting the required editor window width makes it possible to have +several files open side-by-side, and works well when using code +review tools that present the two versions in adjacent columns. + +The default wrapping in most tools disrupts the visual structure of the +code, making it more difficult to understand. The limits are chosen to +avoid wrapping in editors with the window width set to 80, even +if the tool places a marker glyph in the final column when wrapping +lines. 
Some web based tools may not offer dynamic line wrapping at all. + +Some teams strongly prefer a longer line length. For code maintained +exclusively or primarily by a team that can reach agreement on this +issue, it is okay to increase the nominal line length from 80 to +100 characters (effectively increasing the maximum length to 99 +characters), provided that comments and docstrings are still wrapped +at 72 characters. + +The Python standard library is conservative and requires limiting +lines to 79 characters (and docstrings/comments to 72). The preferred way of wrapping long lines is by using Python's implied line continuation inside parentheses, brackets and braces. Long lines can be broken over multiple lines by wrapping expressions in parentheses. These should be used in preference to using a backslash -for line continuation. Make sure to indent the continued line -appropriately. The preferred place to break around a binary operator -is *after* the operator, not before it. Some examples:: +for line continuation. + +Backslashes may still be appropriate at times. For example, long, +multiple ``with``-statements cannot use implicit continuation, so +backslashes are acceptable:: + + with open('/path/to/some/file/you/want/to/read') as file_1, \ + open('/path/to/some/file/being/written', 'w') as file_2: + file_2.write(file_1.read()) + +Another such case is with ``assert`` statements. + +Make sure to indent the continued line appropriately. The preferred +place to break around a binary operator is *after* the operator, not +before it. Some examples:: class Rectangle(Blob): def __init__(self, width, height, color='black', emphasis=None, highlight=0): if (width == 0 and height == 0 and - color == 'red' and emphasis == 'strong' or - highlight > 100): + color == 'red' and emphasis == 'strong' or + highlight > 100): raise ValueError("sorry, you lose") if width == 0 and height == 0 and (color == 'red' or emphasis is None): @@ -198,18 +244,21 @@ Note, some editors and web-based code viewers may not recognize control-L as a form feed and will show another glyph in its place. -Encodings (PEP 263) -------------------- -Code in the core Python distribution should always use the ASCII or -Latin-1 encoding (a.k.a. ISO-8859-1). For Python 3.0 and beyond, -UTF-8 is preferred over Latin-1, see PEP 3120. +Source File Encoding +-------------------- -Files using ASCII should not have a coding cookie. Latin-1 (or UTF-8) -should only be used when a comment or docstring needs to mention an -author name that requires Latin-1; otherwise, using ``\x``, ``\u`` or -``\U`` escapes is the preferred way to include non-ASCII data in -string literals. +Code in the core Python distribution should always use UTF-8 (or ASCII +in Python 2). + +Files using ASCII (in Python 2) or UTF-8 (in Python 3) should not have +an encoding declaration. + +In the standard library, non-default encodings should be used only for +test purposes or when a comment or docstring needs to mention an author +name that contains non-ASCII characters; otherwise, using ``\x``, +``\u``, ``\U``, or ``\N`` escapes is the preferred way to include +non-ASCII data in string literals. For Python 3.0 and beyond, the following policy is prescribed for the standard library (see PEP 3131): All identifiers in the Python @@ -253,11 +302,27 @@ Put any relevant ``__all__`` specification after the imports. -- Relative imports for intra-package imports are highly discouraged. - Always use the absolute package path for all imports. 
Even now that - PEP 328 is fully implemented in Python 2.5, its style of explicit - relative imports is actively discouraged; absolute imports are more - portable and usually more readable. +- Absolute imports are recommended, as they are usually more readable + and tend to be better behaved (or at least give better error + messages) if the import system is incorrectly configured (such as + when a directory inside a package ends up on ``sys.path``):: + + import mypkg.sibling + from mypkg import sibling + from mypkg.sibling import example + + However, explicit relative imports are an acceptable alternative to + absolute imports, especially when dealing with complex package layouts + where using absolute imports would be unnecessarily verbose:: + + from . import sibling + from .sibling import example + + Standard library code should avoid complex package layouts and always + use absolute imports. + + Implicit relative imports should *never* be used and have been removed + in Python 3. - When importing a class from a class-containing module, it's usually okay to spell this:: @@ -272,6 +337,18 @@ and use "myclass.MyClass" and "foo.bar.yourclass.YourClass". +- Wildcard imports (``from <module> import *``) should be avoided, as + they make it unclear which names are present in the namespace, + confusing both readers and many automated tools. There is one + defensible use case for a wildcard import, which is to republish an + internal interface as part of a public API (for example, overwriting + a pure Python implementation of an interface with the definitions + from an optional accelerator module and exactly which definitions + will be overwritten isn't known in advance). + + When republishing names this way, the guidelines below regarding + public and internal interfaces still apply. + Whitespace in Expressions and Statements ======================================== @@ -330,7 +407,7 @@ - If operators with different priorities are used, consider adding whitespace around the operators with the lowest priority(ies). Use - your own judgement; however, never use more than one space, and + your own judgment; however, never use more than one space, and always have the same amount of whitespace on both sides of a binary operator. @@ -747,6 +824,36 @@ advanced callers. +Public and internal interfaces +------------------------------ + +Any backwards compatibility guarantees apply only to public interfaces. +Accordingly, it is important that users be able to clearly distinguish +between public and internal interfaces. + +Documented interfaces are considered public, unless the documentation +explicitly declares them to be provisional or internal interfaces exempt +from the usual backwards compatibility guarantees. All undocumented +interfaces should be assumed to be internal. + +To better support introspection, modules should explicitly declare the +names in their public API using the ``__all__`` attribute. Setting +``__all__`` to an empty list indicates that the module has no public API. + +Even with ``__all__`` set appropriately, internal interfaces (packages, +modules, classes, functions, attributes or other names) should still be +prefixed with a single leading underscore. + +An interface is also considered internal if any containing namespace +(package, module or class) is considered internal. + +Imported names should always be considered an implementation detail.
+Other modules must not rely on indirect access to such imported names +unless they are an explicitly documented part of the containing module's +API, such as ``os.path`` or a package's ``__init__`` module that exposes +functionality from submodules. + + Programming Recommendations =========================== @@ -756,10 +863,12 @@ For example, do not rely on CPython's efficient implementation of in-place string concatenation for statements in the form ``a += b`` - or ``a = a + b``. Those statements run more slowly in Jython. In - performance sensitive parts of the library, the ``''.join()`` form - should be used instead. This will ensure that concatenation occurs - in linear time across various implementations. + or ``a = a + b``. This optimization is fragile even in CPython (it + only works for some types) and isn't present at all in implementations + that don't use refcounting. In performance sensitive parts of the + library, the ``''.join()`` form should be used instead. This will + ensure that concatenation occurs in linear time across various + implementations. - Comparisons to singletons like None should always be done with ``is`` or ``is not``, never the equality operators. @@ -786,29 +895,59 @@ operator. However, it is best to implement all six operations so that confusion doesn't arise in other contexts. -- Use class-based exceptions. +- Always use a def statement instead of an assignment statement that binds + a lambda expression directly to a name. - String exceptions in new code are forbidden, and this language - feature has been removed in Python 2.6. + Yes:: - Modules or packages should define their own domain-specific base - exception class, which should be subclassed from the built-in - Exception class. Always include a class docstring. E.g.:: + def f(x): return 2*x - class MessageError(Exception): - """Base class for errors in the email package.""" + No:: + + f = lambda x: 2*x + + The first form means that the name of the resulting function object is + specifically 'f' instead of the generic '<lambda>'. This is more + useful for tracebacks and string representations in general. The use + of the assignment statement eliminates the sole benefit a lambda + expression can offer over an explicit def statement (i.e. that it can + be embedded inside a larger expression) + +- Derive exceptions from ``Exception`` rather than ``BaseException``. + Direct inheritance from ``BaseException`` is reserved for exceptions + where catching them is almost always the wrong thing to do. + + Design exception hierarchies based on the distinctions that code + *catching* the exceptions is likely to need, rather than the locations + where the exceptions are raised. Aim to answer the question + "What went wrong?" programmatically, rather than only stating that + "A problem occurred" (see PEP 3151 for an example of this lesson being + learned for the builtin exception hierarchy) Class naming conventions apply here, although you should add the - suffix "Error" to your exception classes, if the exception is an - error. Non-error exceptions need no special suffix. + suffix "Error" to your exception classes if the exception is an + error. Non-error exceptions that are used for non-local flow control + or other forms of signaling need no special suffix. -- When raising an exception, use ``raise ValueError('message')`` +- Use exception chaining appropriately. In Python 3, "raise X from Y" + should be used to indicate explicit replacement without losing the + original traceback.
+ + When deliberately replacing an inner exception (using "raise X" in + Python 2 or "raise X from None" in Python 3.3+), ensure that relevant + details are transferred to the new exception (such as preserving the + attribute name when converting KeyError to AttributeError, or + embedding the text of the original exception in the new exception + message). + +- When raising an exception in Python 2, use ``raise ValueError('message')`` instead of the older form ``raise ValueError, 'message'``. - The paren-using form is preferred because when the exception - arguments are long or include string formatting, you don't need to - use line continuation characters thanks to the containing - parentheses. The older form is not legal syntax in Python 3. + The latter form is not legal Python 3 syntax. + + The paren-using form also means that when the exception arguments are + long or include string formatting, you don't need to use line + continuation characters thanks to the containing parentheses. - When catching exceptions, mention specific exceptions whenever possible instead of using a bare ``except:`` clause. @@ -838,6 +977,21 @@ exception propagate upwards with ``raise``. ``try...finally`` can be a better way to handle this case. +- When binding caught exceptions to a name, prefer the explicit name + binding syntax added in Python 2.6:: + + try: + process_data() + except Exception as exc: + raise DataProcessingFailedError(str(exc)) + + This is the only syntax supported in Python 3, and avoids the + ambiguity problems associated with the older comma-based syntax. + +- When catching operating system errors, prefer the explicit exception + hierarchy introduced in Python 3.3 over introspection of ``errno`` + values. + - Additionally, for all try/except clauses, limit the ``try`` clause to the absolute minimum amount of code necessary. Again, this avoids masking bugs. @@ -860,6 +1014,10 @@ # Will also catch KeyError raised by handle_value() return key_not_found(key) +- When a resource is local to a particular section of code, use a + ``with`` statement to ensure it is cleaned up promptly and reliably + after use. A try/finally statement is also acceptable. + - Context managers should be invoked through separate functions or methods whenever they do something other than acquire and release resources. For example: @@ -894,9 +1052,6 @@ Yes: if foo.startswith('bar'): No: if foo[:3] == 'bar': - The exception is if your code must work with Python 1.5.2 (but let's - hope not!). - - Object type comparisons should always use isinstance() instead of comparing types directly. :: @@ -905,11 +1060,15 @@ No: if type(obj) is type(1): When checking if an object is a string, keep in mind that it might - be a unicode string too! In Python 2.3, str and unicode have a + be a unicode string too! In Python 2, str and unicode have a common base class, basestring, so you can do:: if isinstance(obj, basestring): + Note that in Python 3, ``unicode`` and ``basestring`` no longer exist + (there is only ``str``) and a bytes object is no longer a kind of + string (it is a sequence of integers instead) + - For sequences, (strings, lists, tuples), use the fact that empty sequences are false. :: @@ -934,6 +1093,10 @@ annotation style. Instead, the annotations are left for users to discover and experiment with useful annotation styles. + It is recommended that third party experiments with annotations use an + associated decorator to indicate how the annotation should be + interpreted. 
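A rough sketch of the "associated decorator" idea recommended above (the decorator name and the "units" annotation style are invented for the example)::

    def units(func):
        # Hypothetical marker declaring that this function's annotations
        # are unit strings rather than types or anything else.
        func.annotation_style = "units"
        return func

    @units
    def speed(distance: "metres", duration: "seconds") -> "metres per second":
        return distance / duration

    print(speed(10.0, 2.0), speed.annotation_style)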
+ Early core developer attempts to use function annotations revealed inconsistent, ad-hoc annotation styles. For example: @@ -991,6 +1154,8 @@ .. [3] http://www.wikipedia.com/wiki/CamelCase +.. [4] PEP 8 modernisation, July 2013 + http://bugs.python.org/issue18472 Copyright ========= diff --git a/pep-0315.txt b/pep-0315.txt --- a/pep-0315.txt +++ b/pep-0315.txt @@ -4,7 +4,7 @@ Last-Modified: $Date$ Author: Raymond Hettinger W Isaac Carroll -Status: Deferred +Status: Rejected Type: Standards Track Content-Type: text/plain Created: 25-Apr-2003 @@ -21,19 +21,32 @@ Notice - Deferred; see + Rejected; see + http://mail.python.org/pipermail/python-ideas/2013-June/021610.html + + This PEP has been deferred since 2006; see http://mail.python.org/pipermail/python-dev/2006-February/060718.html Subsequent efforts to revive the PEP in April 2009 did not meet with success because no syntax emerged that could - compete with a while-True and an inner if-break. + compete with the following form: - A syntax was found for a basic do-while loop but it found - had little support because the condition was at the top: + while True: + + if not : + break + + + A syntax alternative to the one proposed in the PEP was found for + a basic do-while loop but it gained little support because the + condition was at the top: do ... while : + Users of the language are advised to use the while-True form with + an inner if-break when a do-while loop would have been appropriate. + Motivation diff --git a/pep-0394.txt b/pep-0394.txt --- a/pep-0394.txt +++ b/pep-0394.txt @@ -19,10 +19,17 @@ be portable across ``*nix`` systems, regardless of the default version of the Python interpreter (i.e. the version invoked by the ``python`` command). -* ``python2`` will refer to some version of Python 2.x -* ``python3`` will refer to some version of Python 3.x -* ``python`` *should* refer to the same target as ``python2`` but *may* - refer to ``python3`` on some bleeding edge distributions +* ``python2`` will refer to some version of Python 2.x. +* ``python3`` will refer to some version of Python 3.x. +* for the time being, all distributions *should* ensure that ``python`` + refers to the same target as ``python2``. +* however, end users should be aware that ``python`` refers to ``python3`` + on at least Arch Linux (that change is what prompted the creation of this + PEP), so ``python`` should be used in the shebang line only for scripts + that are source compatible with both Python 2 and 3. +* in preparation for an eventual change in the default version of Python, + Python 2 only scripts should either be updated to be source compatible + with Python 3 or else to use ``python2`` in the shebang line. Recommendation @@ -103,15 +110,29 @@ system. They will hopefully be helpful to any distributions considering making such a change. -* Distributions that only include ``python3`` in their base install (i.e. - they do not provide ``python2`` by default) along with those that are - aggressively trying to reach that point (and are willing to break third - party scripts while attempting to get there) are already beginning to alias - the ``python`` command to ``python3`` -* More conservative distributions that are less willing to tolerate breakage - of third party scripts continue to alias it to ``python2``. Until the - conventions described in this PEP are more widely adopted, having ``python`` - invoke ``python2`` will remain the recommended option. 
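To tie the PEP 394 shebang guidance in this hunk to a concrete case: a script that keeps the bare ``python`` shebang should run unchanged under both major versions, for example (a trivial, invented script)::

    #!/usr/bin/env python
    # Source compatible with Python 2 and 3, so a bare "python" shebang is
    # acceptable; Python-3-only scripts should request python3 explicitly.
    from __future__ import print_function

    import sys

    version = ".".join(str(part) for part in sys.version_info[:2])
    print("Hello from Python", version)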
+* The main barrier to a distribution switching the ``python`` command from + ``python2`` to ``python3`` isn't breakage within the distribution, but + instead breakage of private third party scripts developed by sysadmins + and other users. Updating the ``python`` command to invoke ``python3`` + by default indicates that a distribution is willing to break such scripts + with errors that are potentially quite confusing for users that aren't + yet familiar with the backwards incompatible changes in Python 3. For + example, while the change of ``print`` from a statement to a builtin + function is relatively simple for automated converters to handle, the + SyntaxError from attempting to use the Python 2 notation in Python 3 is + thoroughly confusing if you aren't already aware of the change:: + + $ python3 -c 'print "Hello, world!"' + File "", line 1 + print "Hello, world!" + ^ + SyntaxError: invalid syntax + +* Avoiding breakage of such third party scripts is the key reason this + PEP recommends that ``python`` continue to refer to ``python2`` for the + time being. Until the conventions described in this PEP are more widely + adopted, having ``python`` invoke ``python2`` will remain the recommended + option. * The ``pythonX.X`` (e.g. ``python2.6``) commands exist on some systems, on which they invoke specific minor versions of the Python interpreter. It can be useful for distribution-specific packages to take advantage of these @@ -148,10 +169,13 @@ ``python`` command is only executed in an interactive manner as a user convenience, or to run scripts that are source compatible with both Python 2 and Python 3. +* one symbolic date being considered for a possible change to the official + recommendation in this PEP is the planned switch of Python 2.7 from full + maintenance to security update only status in 2015 (see PEP 373). Backwards Compatibility -========================= +======================= A potential problem can arise if a script adhering to the ``python2``/``python3`` convention is executed on a system not supporting @@ -217,7 +241,8 @@ This PEP deliberately excludes any proposals relating to Microsoft Windows, as devising an equivalent solution for Windows was deemed too complex to handle here. PEP 397 and the related discussion on the python-dev mailing list -address this issue. +address this issue (like this PEP, the PEP 397 launcher invokes Python 2 by +default if versions of both Python 2 and 3 are installed on the system). References diff --git a/pep-0413.txt b/pep-0413.txt --- a/pep-0413.txt +++ b/pep-0413.txt @@ -627,7 +627,7 @@ # Add maint.1, compat.1 etc as releases are made -Putting the version information in the directory heirarchy isn't strictly +Putting the version information in the directory hierarchy isn't strictly necessary (since the NEWS file generator could figure out from the version history), but does make it easier for *humans* to keep the different versions in order. diff --git a/pep-0426.txt b/pep-0426.txt --- a/pep-0426.txt +++ b/pep-0426.txt @@ -12,7 +12,8 @@ Content-Type: text/x-rst Requires: 440 Created: 30 Aug 2012 -Post-History: 14 Nov 2012, 5 Feb 2013, 7 Feb 2013, 9 Feb 2013, 27-May-2013 +Post-History: 14 Nov 2012, 5 Feb 2013, 7 Feb 2013, 9 Feb 2013, + 27 May 2013, 20 Jun 2013, 23 Jun 2013, 14 Jul 2013 Replaces: 345 @@ -21,8 +22,7 @@ This PEP describes a mechanism for publishing and exchanging metadata related to Python distributions. It includes specifics of the field names, -and their semantics and -usage. +and their semantics and usage. 
This document specifies version 2.0 of the metadata format. Version 1.0 is specified in PEP 241. @@ -42,7 +42,9 @@ "I" in this doc refers to Nick Coghlan. Daniel and Donald either wrote or contributed to earlier versions, and have been providing feedback as this - initial draft of the JSON-based rewrite has taken shape. + JSON-based rewrite has taken shape. Daniel and Donald have also been + vetting the proposal as we go to ensure it is practical to implement for + both clients and index servers. Metadata 2.0 represents a major upgrade to the Python packaging ecosystem, and attempts to incorporate experience gained over the 15 years(!) since @@ -61,14 +63,15 @@ * this PEP, covering the core metadata format * PEP 440, covering the versioning identification and selection scheme * a new PEP to define v2.0 of the sdist format - * an updated wheel PEP (v1.1) to add pymeta.json - * an updated installation database PEP both for pymeta.json and to add - a linking scheme to better support runtime selection of dependencies, - as well as recording which extras are currently available + * an updated wheel PEP (v1.1) to add pydist.json (and possibly convert + the wheel metadata file from Key:Value to JSON) + * an updated installation database PEP both for pydist.json (and possibly convert + the wheel metadata file from Key:Value to JSON) + * an alternative to \*.pth files that avoids system global side effects + and better supports runtime selection of dependencies * a new static config PEP to standardise metadata generation and creation of sdists - * PEP 439, covering a bootstrapping mechanism for ``pip`` - * a distutils upgrade PEP, adding metadata v2.0 and wheel support. + * a PEP to cover bundling ``pip`` with the CPython installers It's going to take a while to work through all of these and make them a reality. The main change from our last attempt at this is that we're @@ -84,137 +87,287 @@ an irrelevant distraction for future readers. -Definitions -=========== +A Note on Time Frames +===================== + +There's a lot of work going on in the Python packaging space at the moment. +In the near term (up until the release of Python 3.4), those efforts will be +focused on the existing metadata standards, both those defined in Python +Enhancement Proposals, and the de facto standards defined by the setuptools +project. + +This PEP is about setting out a longer term goal for the ecosystem that +captures those existing capabilities in a format that is easier to work +with. There are still a number of key open questions (mostly related to +source based distribution), and those won't be able to receive proper +attention from the development community until the other near term +concerns have been resolved. + + +Purpose +======= + +The purpose of this PEP is to define a common metadata interchange format +for communication between software publication tools and software integration +tools in the Python ecosystem. One key aim is to support full dependency +analysis in that ecosystem without requiring the execution of arbitrary +Python code by those doing the analysis. Another aim is to encourage good +software distribution practices by default, while continuing to support the +current practices of almost all existing users of the Python Package Index +(both publishers and integrators). 
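To make the "no arbitrary code execution" aim above concrete, an integration tool could answer dependency questions purely from parsed metadata, roughly like this (the field layout follows the draft in this PEP; the helper name is invented and nothing here is normative)::

    import json

    def runtime_requirements(pydist_path):
        # Pure data parsing: nothing from the analysed project is imported
        # or executed while collecting its runtime requirements.
        with open(pydist_path, encoding="utf-8") as f:
            metadata = json.load(f)
        names = []
        for clause in metadata.get("run_requires", []):
            names.extend(clause.get("requires", []))
        return names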
+ +The design draws on the Python community's 15 years of experience with +distutils based software distribution, and incorporates ideas and concepts +from other distribution systems, including Python's setuptools, pip and +other projects, Ruby's gems, Perl's CPAN, Node.js's npm, PHP's composer +and Linux packaging systems such as RPM and APT. + + +Development, Distribution and Deployment of Python Software +=========================================================== + +The metadata design in this PEP is based on a particular conceptual model +of the software development and distribution process. This model consists of +the following phases: + +* Software development: this phase involves working with a source checkout + for a particular application to add features and fix bugs. It is + expected that developers in this phase will need to be able to build the + software, run the software's automated test suite, run project specific + utility scripts and publish the software. + +* Software publication: this phase involves taking the developed software + and making it available for use by software integrators. This includes + creating the descriptive metadata defined in this PEP, as well making the + software available (typically by uploading it to an index server). + +* Software integration: this phase involves taking published software + components and combining them into a coherent, integrated system. This + may be done directly using Python specific cross-platform tools, or it may + be handled through conversion to development language neutral platform + specific packaging systems. + +* Software deployment: this phase involves taking integrated software + components and deploying them on to the target system where the software + will actually execute. + +The publication and integration phases are collectively referred to as +the distribution phase, and the individual software components distributed +in that phase are referred to as "distributions". + +The exact details of these phases will vary greatly for particular use cases. +Deploying a web application to a public Platform-as-a-Service provider, +publishing a new release of a web framework or scientific library, +creating an integrated Linux distribution or upgrading a custom application +running in a secure enclave are all situations this metadata design should +be able to handle. + +The complexity of the metadata described in this PEP thus arises directly +from the actual complexities associated with software development, +distribution and deployment in a wide range of scenarios. + + +Supporting definitions +---------------------- The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be interpreted as described in RFC 2119. -"Distributions" are deployable software components published through an index -server or otherwise made available for installation. - -"Versions" are uniquely identified snapshots of a distribution. - -"Distribution archives" are the packaged files which are used to publish -and distribute the software. - -"Source archives" require build tools to be available on the target -system. +"Projects" are software components that are made available for integration. +Projects include Python libraries, frameworks, scripts, plugins, +applications, collections of data or other resources, and various +combinations thereof. Public Python projects are typically registered on +the `Python Package Index`_. 
+ +"Releases" are uniquely identified snapshots of a project. + +"Distributions" are the packaged files which are used to publish +and distribute a release. + +"Source archive" and "VCS checkout" both refer to the raw source code for +a release, prior to creation of an sdist or binary archive. + +An "sdist" is a publication format providing the distribution metadata and +and any source files that are essential to creating a binary archive for +the distribution. Creating a binary archive from an sdist requires that +the appropriate build tools be available on the system. "Binary archives" only require that prebuilt files be moved to the correct location on the target system. As Python is a dynamically bound -cross-platform language, many "binary" archives will contain only pure -Python source code. +cross-platform language, many so-called "binary" archives will contain only +pure Python source code. + +"Contributors" are individuals and organizations that work together to +develop a software component. + +"Publishers" are individuals and organizations that make software components +available for integration (typically by uploading distributions to an +index server) + +"Integrators" are individuals and organizations that incorporate published +distributions as components of an application or larger system. "Build tools" are automated tools intended to run on development systems, producing source and binary distribution archives. Build tools may also be -invoked by installation tools in order to install software distributed as -source archives rather than prebuilt binary archives. +invoked by integration tools in order to build software distributed as +sdists rather than prebuilt binary archives. "Index servers" are active distribution registries which publish version and dependency metadata and place constraints on the permitted metadata. +"Public index servers" are index servers which allow distribution uploads +from untrusted third parties. The `Python Package Index`_ is a public index +server. + "Publication tools" are automated tools intended to run on development systems and upload source and binary distribution archives to index servers. -"Installation tools" are automated tools intended to run on production -systems, consuming source and binary distribution archives from an index -server or other designated location and deploying them to the target system. +"Integration tools" are automated tools that consume the metadata and +distribution archives published by an index server or other designated +source, and make use of them in some fashion, such as installing them or +converting them to a platform specific packaging format. + +"Installation tools" are integration tools specifically intended to run on +deployment targets, consuming source and binary distribution archives from +an index server or other designated location and deploying them to the target +system. "Automated tools" is a collective term covering build tools, index servers, -publication tools, installation tools and any other software that produces +publication tools, integration tools and any other software that produces or consumes distribution version and dependency metadata. -"Projects" refers to the developers that manage the creation of a particular -distribution. - "Legacy metadata" refers to earlier versions of this metadata specification, along with the supporting metadata file formats defined by the ``setuptools`` project. 
- -Development and distribution activities -======================================= - -Making effective use of a common metadata format requires a common -understanding of the most complex development and distribution model -the format is intended to support. The metadata format described in this -PEP is based on the following activities: - -* Development: during development, a user is operating from a - source checkout (or equivalent) for the current project. Dependencies must - be available in order to build, test and create a source archive of the - distribution. - - .. note:: - As a generated file, the full distribution metadata often won't be - available in a raw source checkout or tarball. In such cases, the - relevant distribution metadata is generally obtained from another - location, such as the last published release, or by generating it - based on a command given in a standard input file. This spec - deliberately avoids handling that scenario, instead falling back on - the existing ``setup.py`` functionality. - -* Build: the build step is the process of turning a source archive into a - binary archive. Dependencies must be available in order to build and - create a binary archive of the distribution (including any documentation - that is installed on target systems). - -* Deployment: the deployment phase consists of two subphases: - - * Installation: the installation phase involves getting the distribution - and all of its runtime dependencies onto the target system. In this - phase, the distribution may already be on the system (when upgrading or - reinstalling) or else it may be a completely new installation. - - * Usage: the usage phase, also referred to as "runtime", is normal usage - of the distribution after it has been installed on the target system. - -The metadata format described in this PEP is designed to enable the -following: - -* It should be practical to have separate development systems, build systems - and deployment systems. -* It should be practical to install dependencies needed specifically to - build source archives only on development systems. -* It should be practical to install dependencies needed specifically to - build the software only on development and build systems, as well as - optionally on deployment systems if installation from source archives - is needed. -* It should be practical to install dependencies needed to run the - distribution only on development and deployment systems. -* It should be practical to install the dependencies needed to run a - distribution's test suite only on development systems, as well as - optionally on deployment systems. -* It should be practical for repackagers to separate out the build - dependencies needed to build the application itself from those required - to build its documentation (as the documentation often doesn't need to - be rebuilt when porting an application to a different platform). +"Distro" is used as the preferred term for Linux distributions, to help +avoid confusion with the Python-specific meaning of the term "distribution". + +"Dist" is the preferred abbreviation for "distributions" in the sense defined +in this PEP. + +"Qualified name" is a dotted Python identifier. For imported modules and +packages, the qualified name is available as the ``__name__`` attribute, +while for functions and classes it is available as the ``__qualname__`` +attribute. + +A "fully qualified name" uniquely locates an object in the Python module +namespace. 
For imported modules and packages, it is the same as the +qualified name. For other Python objects, the fully qualified name consists +of the qualified name of the containing module or package, a colon (``:``) +and the qualified name of the object relative to the containing module or +package. + + +Integration and deployment of distributions +------------------------------------------- + +The primary purpose of the distribution metadata is to support integration +and deployment of distributions as part of larger applications and systems. + +Integration and deployment can in turn be broken down into further substeps. + +* Build: the build step is the process of turning a VCS checkout, source + archive or sdist into a binary archive. Dependencies must be available + in order to build and create a binary archive of the distribution + (including any documentation that is installed on target systems). + +* Installation: the installation step involves getting the distribution + and all of its runtime dependencies onto the target system. In this + step, the distribution may already be on the system (when upgrading or + reinstalling) or else it may be a completely new installation. + +* Runtime: this is normal usage of a distribution after it has been + installed on the target system. + +These three steps may all occur directly on the target system. Alternatively +the build step may be separated out by using binary archives provided by the +publisher of the distribution, or by creating the binary archives on a +separate system prior to deployment. The advantage of the latter approach +is that it minimizes the dependencies that need to be installed on +deployment targets (as the build dependencies will be needed only on the +build systems). + +The published metadata for distributions SHOULD allow integrators, with the +aid of build and integration tools, to: + +* obtain the original source code that was used to create a distribution +* identify and retrieve the dependencies (if any) required to use a + distribution +* identify and retrieve the dependencies (if any) required to build a + distribution from source +* identify and retrieve the dependencies (if any) required to run a + distribution's test suite +* find resources on using and contributing to the project +* access sufficiently rich metadata to support contacting distribution + publishers through appropriate channels, as well as finding distributions + that are relevant to particular problems + + +Development and publication of distributions +-------------------------------------------- + +The secondary purpose of the distribution metadata is to support effective +collaboration amongst software contributors and publishers during the +development phase. + +The published metadata for distributions SHOULD allow contributors +and publishers, with the aid of build and publication tools, to: + +* perform all the same activities needed to effectively integrate and + deploy the distribution +* identify and retrieve the additional dependencies needed to develop and + publish the distribution +* specify the dependencies (if any) required to use the distribution +* specify the dependencies (if any) required to build the distribution + from source +* specify the dependencies (if any) required to run the distribution's + test suite +* specify the additional dependencies (if any) required to develop and + publish the distribution + + +Standard build system +--------------------- .. 
note:: - This "most complex supported scenario" is almost *exactly* what has to - happen to get an upstream Python package into a Linux distribution, and - is why the current crop of automatic Python metadata -> Linux distro - metadata converters have some serious issues, at least from the point of - view of complying with distro packaging policies: the information - they need to comply with those policies isn't available from the - upstream projects, and all current formats for publishing it are - distro specific. This means either upstreams have to maintain metadata - for multiple distributions (which rarely happens) or else repackagers - have to do a lot of work manually in order to separate out these - dependencies in a way that complies with those policies. - - One thing this PEP aims to do is define a metadata format that at least - has the *potential* to provide the info repackagers need, thus allowing - upstream Python projects and Linux distro repackagers to collaborate more - effectively (and, ideally, make it possible to reliably automate - the process of converting upstream Python distributions into policy - compliant distro packages). - - Some items in this section (and the contents of this note) will likely - end up moving down to the "Rationale for changes from PEP 345" section. + The standard build system currently described in the PEP is a draft based + on existing practices for projects using distutils or setuptools as their + build system (or other projects, like ``d2to1``, that expose a setup.py + file for backwards compatibility with existing tools) + + The specification doesn't currently cover expected argument support for + the commands, which is a limitation that needs to be addressed before the + PEP can be considered ready for acceptance. + + It is also possible that the "meta build system" will be separated out + into a distinct PEP in the coming months (similar to the separation of + the versioning and requirement specification standard out to PEP 440). + + If a `suitable API can be worked out `__, then it may + even be possible to switch to a more declarative API for build system + specification. + +Both development and integration of distributions relies on the ability to +build extension modules and perform other operations in a distribution +independent manner. + +The current iteration of the metadata relies on the +``distutils``/``setuptools`` commands system to support these necessary +development and integration activities: + +* ``python setup.py dist_info``: generate distribution metadata in place + given a source archive or VCS checkout +* ``python setup.py sdist``: create an sdist from a source archive + or VCS checkout +* ``python setup.py build_ext --inplace``: build extension modules in place + given an sdist, source archive or VCS checkout +* ``python setup.py test``: run the distribution's test suite in place + given an sdist, source archive or VCS checkout +* ``python setup.py bdist_wheel``: create a binary archive from an sdist, + source archive or VCS checkout Metadata format @@ -247,22 +400,30 @@ Automated tools MAY automatically derive valid values from other information sources (such as a version control system). +Automated tools, especially public index servers, MAY impose additional +length restrictions on metadata beyond those enumerated in this PEP. 
Such +limits SHOULD be imposed where necessary to protect the integrity of a +service, based on the available resources and the service provider's +judgment of reasonable metadata capacity requirements. + Metadata files -------------- -The information defined in this PEP is serialised to ``pymeta.json`` -files for some use cases. As indicated by the extension, these -are JSON-encoded files. Each file consists of a single serialised mapping, -with fields as described in this PEP. +The information defined in this PEP is serialised to ``pydist.json`` +files for some use cases. These are files containing UTF-8 encoded JSON +metadata. + +Each metadata file consists of a single serialised mapping, with fields as +described in this PEP. There are three standard locations for these metadata files: -* as a ``{distribution}-{version}.dist-info/pymeta.json`` file in an +* as a ``{distribution}-{version}.dist-info/pydist.json`` file in an ``sdist`` source distribution archive -* as a ``{distribution}-{version}.dist-info/pymeta.json`` file in a ``wheel`` +* as a ``{distribution}-{version}.dist-info/pydist.json`` file in a ``wheel`` binary distribution archive -* as a ``{distribution}-{version}.dist-info/pymeta.json`` file in a local +* as a ``{distribution}-{version}.dist-info/pydist.json`` file in a local Python installation database .. note:: @@ -270,21 +431,16 @@ These locations are to be confirmed, since they depend on the definition of sdist 2.0 and the revised installation database standard. There will also be a wheel 1.1 format update after this PEP is approved that - mandates 2.0+ metadata. + mandates provision of 2.0+ metadata. Other tools involved in Python distribution may also use this format. -It is expected that these metadata files will be generated by build tools -based on other input formats (such as ``setup.py``) rather than being -edited by hand. - -.. note:: - - It may be appropriate to add a "./setup.py dist_info" command to - setuptools to allow just the sdist metadata files to be generated - without having to build the full sdist archive. This would be - similar to the existing "./setup.py egg_info" command in setuptools, - which would continue to emit the legacy metadata format. +As JSON files are generally awkward to edit by hand, it is RECOMMENDED +that these metadata files be generated by build tools based on other +input formats (such as ``setup.py``) rather than being used directly as +a data input format. Generating the metadata as part of the publication +process also helps to deal with version specific fields (including the +source URL and the version field itself). For backwards compatibility with older installation tools, metadata 2.0 files MAY be distributed alongside legacy metadata. @@ -292,6 +448,10 @@ Index servers MAY allow distributions to be uploaded and installation tools MAY allow distributions to be installed with only legacy metadata. +Automated tools MAY attempt to automatically translate legacy metadata to +the format described in this PEP. Advice for doing so effectively is given +in Appendix A. 
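As a small sketch of the generation side described above, a build tool might serialise the mapping to a ``pydist.json`` file in the ``dist-info`` directory roughly as follows (the project name, version and requirement reuse the PEP's own illustrative examples; the layout of ``run_requires`` is assumed from the draft)::

    import json
    import os

    dist_info = "ComfyChair-1.0a2.dist-info"
    os.makedirs(dist_info, exist_ok=True)

    metadata = {
        "metadata_version": "2.0",
        "generator": "examplebuildtool (0.1)",
        "name": "ComfyChair",
        "version": "1.0a2",
        "run_requires": [{"requires": ["SoftCushions"]}],
    }

    # UTF-8 encoded JSON, one serialised mapping per file, as specified above.
    with open(os.path.join(dist_info, "pydist.json"), "w", encoding="utf-8") as f:
        json.dump(metadata, f, indent=2, sort_keys=True)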
+ Essential dependency resolution metadata ---------------------------------------- @@ -304,39 +464,92 @@ fields: * ``metadata_version`` +* ``generator`` * ``name`` * ``version`` -* ``build_label`` -* ``version_url`` +* ``source_label`` +* ``source_url`` * ``extras`` -* ``requires`` -* ``may-require`` -* ``build-requires`` -* ``build-may-require`` -* ``dev-requires`` -* ``dev-may-require`` +* ``meta_requires`` +* ``run_requires`` +* ``test_requires`` +* ``build_requires`` +* ``dev_requires`` * ``provides`` * ``obsoleted_by`` * ``supports_environments`` When serialised to a file, the name used for this metadata set SHOULD -be ``pymeta-minimal.json``. - -Abbreviated metadata --------------------- - -Some metadata fields have the potential to contain a lot of information -that will rarely be referenced, greatly increasing storage requirements -without providing significant benefits. - -The abbreviated metadata for a distribution consists of all fields -*except* the following: - -* ``description`` -* ``contributors`` +be ``pydist-dependencies.json``. + + +Export metadata +--------------- + +Distributions may define components that are intended for use by other +distributions (such as plugins). As it can be beneficial to know whether or +not a distribution defines any such exports without needing to parse any +metadata, a suitable subset is defined for serialisation to a separate file +in the ``dist-info`` metadata directory. + +The external command metadata consists of the following fields: + +* ``metadata_version`` +* ``generator`` +* ``name`` +* ``version`` +* ``exports`` When serialised to a file, the name used for this metadata set SHOULD -be ``pymeta-short.json``. +be ``pydist-exports.json``. + + +Command metadata +---------------- + +Distributions may define commands that will be available from the command +line following installation. As it can be beneficial to know whether or not +a distribution has such commands without needing to parse any metadata, +a suitable subset is defined for serialisation to a separate file in the +``dist-info`` metadata directory. + +The external command metadata consists of the following fields: + +* ``metadata_version`` +* ``generator`` +* ``name`` +* ``version`` +* ``commands`` + +When serialised to a file, the name used for this metadata set SHOULD +be ``pydist-commands.json``. + + +Included documents +------------------ + +Rather than being incorporated directly into the structured metadata, some +supporting documents are included alongside the metadata file in the +``dist-info`` metadata directory. + +To accommodate the variety of existing naming conventions for these files, +they are explicitly identified in the ``document_names`` field, rather +than expecting index servers and other automated tools to identify them +automatically. + + +Metadata validation +------------------- + +A `jsonschema `__ description of +the distribution metadata is `available +`__. + +This schema does NOT currently handle validation of some of the more complex +string fields (instead treating them as opaque strings). + +Except where otherwise noted, all URL fields in the metadata MUST comply +with RFC 3986. Core metadata @@ -376,6 +589,17 @@ "metadata_version": "2.0" +Generator +--------- + +Name (and optional version) of the program that generated the file, +if any. A manually produced file would omit this field. 
+ +Example:: + + "generator": "setuptools (0.9)" + + Name ---- @@ -391,7 +615,7 @@ * hyphens (``-``) * periods (``.``) -Distributions named MUST start and end with an ASCII letter or digit. +Distribution names MUST start and end with an ASCII letter or digit. Automated tools MUST reject non-compliant names. @@ -399,14 +623,14 @@ consider hyphens and underscores to be equivalent. Index servers MAY consider "confusable" characters (as defined by the -Unicode Consortium in `TR39: Unicode Security Mechanisms `__) to be +Unicode Consortium in `TR39: Unicode Security Mechanisms `_) to be equivalent. Index servers that permit arbitrary distribution name registrations from untrusted sources SHOULD consider confusable characters to be equivalent when registering new distributions (and hence reject them as duplicates). -Installation tools MUST NOT silently accept a confusable alternate +Integration tools MUST NOT silently accept a confusable alternate spelling as matching a requested distribution name. At time of writing, the characters in the ASCII subset designated as @@ -421,45 +645,6 @@ "name": "ComfyChair" -.. note:: - - Debian doesn't actually permit underscores in names, but that seems - unduly restrictive for this spec given the common practice of using - valid Python identifiers as Python distribution names. A Debian side - policy of converting underscores to hyphens seems easy enough to - implement (and the requirement to consider hyphens and underscores as - equivalent ensures that doing so won't introduce any conflicts). - - We're deliberately *not* following Python 3 down the path of arbitrary - unicode identifiers at this time. The security implications of doing so - are substantially worse in the software distribution use case (it opens - up far more interesting attack vectors than mere code obfuscation), the - existing tooling really only works properly if you abide by the stated - restrictions and changing it would require a *lot* of work for all - the automated tools in the chain. - - PyPI has recently been updated to reject non-compliant names for newly - registered projects, but existing non-compliant names are still - tolerated when using legacy metadata formats. Affected distributions - will need to change their names (typically be replacing spaces with - hyphens) before they can migrate to the new metadata formats. - - Donald Stufft ran an analysis, and the new restrictions impact less - than 230 projects out of the ~31k already on PyPI. This isn't that - surprising given the fact that many existing tools could already - exhibit odd behaviour when attempting to deal with non-compliant - names, implicitly discouraging the use of more exotic names. - - Of those projects, ~200 have the only non-compliant character as an - internal space (e.g. "Twisted Web"). These will be automatically - migrated by replacing the spaces with hyphens (e.g. "Twisted-Web"), - which is what you have to actually type to install these distributions - with ``setuptools`` (which powers both ``easy_install`` and ``pip``). - - The remaining ~30 will be investigated manually and decided upon on a - case by case basis how to migrate them to the new naming rules (in - consultation with the maintainers of those projects where possible). - Version ------- @@ -469,58 +654,88 @@ variety of flexible version specification mechanisms (see PEP 440 for details). +Version identifiers MUST comply with the format defined in PEP 440. + +Version identifiers MUST be unique within each project. 
+ Example:: "version": "1.0a2" -Additional identifying metadata -=============================== - -This section specifies fields that provide other identifying details -that are unique to this distribution. +Summary +------- + +A short summary of what the distribution does. + +This field SHOULD contain fewer than 512 characters and MUST contain fewer +than 2048. + +This field SHOULD NOT contain any line breaks. + +A more complete description SHOULD be included as a separate file in the +sdist for the distribution. See `Document names`_ for details. + +Example:: + + "summary": "A module that is more fiendish than soft cushions." + + +Source code metadata +==================== + +This section specifies fields that provide identifying details for the +source code used to produce this distribution. All of these fields are optional. Automated tools MUST operate correctly if a distribution does not provide them, including failing cleanly when an operation depending on one of these fields is requested. -Build label ------------ - -A constrained identifying text string, as defined in PEP 440. Build labels -cannot be used in ordered version comparisons, but may be used to select -an exact version (see PEP 440 for details). - +Source label +------------ + +A constrained identifying text string, as defined in PEP 440. Source labels +cannot be used in version specifiers - they are included for information +purposes only. + +Source labels MUST meet the character restrictions defined in PEP 440. + +Source labels MUST be unique within each project and MUST NOT match any +defined version for the project. Examples:: - "build_label": "1.0.0-alpha.1" - - "build_label": "1.3.7+build.11.e0f985a" - - "build_label": "v1.8.1.301.ga0df26f" - - "build_label": "2013.02.17.dev123" - - -Version URL ------------ - -A string containing a full URL where this specific version of the -distribution can be downloaded. (This means that the URL can't be -something like ``"https://github.com/pypa/pip/archive/master.zip"``, but -instead must be ``"https://github.com/pypa/pip/archive/1.3.1.zip"``.) - -Some appropriate targets for a version URL are a source tarball, an sdist -archive or a direct reference to a tag or specific commit in an online -version control system. - -All version URL references SHOULD either specify a secure transport -mechanism (such as ``https``) or else include an expected hash value in the -URL for verification purposes. If an insecure transport is specified without -any hash information (or with hash information that the tool doesn't -understand), automated tools SHOULD at least emit a warning and MAY + "source_label": "1.0.0-alpha.1" + + "source_label": "1.3.7+build.11.e0f985a" + + "source_label": "v1.8.1.301.ga0df26f" + + "source_label": "2013.02.17.dev123" + + +Source URL +---------- + +A string containing a full URL where the source for this specific version of +the distribution can be downloaded. + +Source URLs MUST be unique within each project. This means that the URL +can't be something like ``"https://github.com/pypa/pip/archive/master.zip"``, +but instead must be ``"https://github.com/pypa/pip/archive/1.3.1.zip"``. + +The source URL MUST reference either a source archive or a tag or specific +commit in an online version control system that permits creation of a +suitable VCS checkout. It is intended primarily for integrators that +wish to recreate the distribution from the original source form. 
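For instance, an integration tool might distinguish the two permitted forms by
inspecting the URL scheme. This is an illustrative sketch only; the
``VCS+protocol`` convention it relies on is described below::

    from urllib.parse import urlsplit

    KNOWN_VCS = {"git", "hg", "svn", "bzr"}

    def classify_source_url(source_url):
        # "git+https" style schemes indicate a VCS reference; anything else
        # is treated as a plain source archive download.
        vcs = urlsplit(source_url).scheme.partition("+")[0]
        return "vcs" if vcs in KNOWN_VCS else "archive"

    classify_source_url("https://github.com/pypa/pip/archive/1.3.1.zip")  # 'archive'
    classify_source_url("git+https://github.com/pypa/pip.git@1.3.1")      # 'vcs'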
+ +All source URL references SHOULD specify a secure transport +mechanism (such as ``https``), include an expected hash value in the +URL for verification purposes, or both. If an insecure transport is specified +without any hash information, with hash information that the tool doesn't +understand, or with a selected hash algorithm that the tool considers too +weak to trust, automated tools SHOULD at least emit a warning and MAY refuse to rely on the URL. It is RECOMMENDED that only hashes which are unconditionally provided by @@ -530,7 +745,7 @@ ``'sha512'``. For source archive references, an expected hash value may be specified by -including a ``=`` as part of the URL +including a ``=`` entry as part of the URL fragment. For version control references, the ``VCS+protocol`` scheme SHOULD be @@ -542,32 +757,9 @@ Example:: - "version_url": "https://github.com/pypa/pip/archive/1.3.1.zip" - "version_url": "http://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686" - "version_url": "git+https://github.com/pypa/pip.git at 1.3.1" - -.. note:: - - This was called "Download-URL" in previous versions of the metadata. It - has been renamed, since there are plenty of other download locations and - this URL is meant to be a way to get the original source for development - purposes (or to generate an SRPM or other platform specific equivalent). - - For extra fun and games, it appears that unlike "svn+ssh://", - neither "git+ssh://" nor "hg+ssh://" natively support direct linking to a - particular tag (hg does support direct links to bookmarks through the URL - fragment, but that doesn't help for git and doesn't appear to be what I - want anyway). - - However pip does have a `defined convention - `__ for - this kind of link, which effectively splits a "URL" into "@". - - The PEP simply adopts pip's existing solution to this problem. - - This field is separate from the project URLs, as it's expected to - change for each version, while the project URLs are expected to - be fairly stable. + "source_url": "https://github.com/pypa/pip/archive/1.3.1.zip" + "source_url": "http://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686" + "source_url": "git+https://github.com/pypa/pip.git at 1.3.1" Additional descriptive metadata @@ -580,74 +772,29 @@ a distribution does not provide them, including failing cleanly when an operation depending on one of these fields is requested. -Summary + +License ------- -A one-line summary of what the distribution does. - -Publication tools SHOULD emit a warning if this field is not provided. Index -servers MAY require that this field be present before allowing a -distribution to be uploaded. +A short string summarising the license used for this distribution. + +Note that distributions that provide this field should still specify any +applicable license Trove classifiers in the `Classifiers`_ field. Even +when an appropriate Trove classifier is available, the license summary can +be a good way to specify a particular version of that license, or to +indicate any variations or exception to the license. + +This field SHOULD contain fewer than 512 characters and MUST contain fewer +than 2048. + +This field SHOULD NOT contain any line breaks. + +The full license text SHOULD be included as a separate file in the source +archive for the distribution. See `Document names`_ for details. Example:: - "summary": "A module that is more fiendish than soft cushions." - -.. 
note:: - - This used to be mandatory, and it's still highly recommended, but really, - nothing should break even when it's missing. - - -Description ------------ - -The distribution metadata should include a longer description of the -distribution that may run to several paragraphs. Software that deals -with metadata should not assume any maximum size for the description. - -The distribution description can be written using reStructuredText -markup [1]_. For programs that work with the metadata, supporting -markup is optional; programs may also display the contents of the -field as plain text without any special formatting. This means that -authors should be conservative in the markup they use. - -Example:: - - "description": "The ComfyChair module replaces SoftCushions.\\n\\nUse until lunchtime, but pause for a cup of coffee at eleven." - -.. note:: - - The difficulty of editing this field in a raw JSON file is one of the - main reasons this metadata interchange format is NOT recommended for - use as an input format for build tools. - - -Description Format ------------------- - -A field indicating the intended format of the text in the description field. -This allows index servers to render the description field correctly and -provide feedback on rendering errors, rather than having to guess the -intended format. - -If this field is omitted, or contains an unrecognised value, the default -rendering format MUST be plain text. - -The following format names SHOULD be used for the specified markup formats: - -* ``txt``: Plain text (default handling if field is omitted) -* ``rst``: reStructured Text -* ``md``: Markdown (exact syntax variant will be implementation dependent) -* ``adoc``: AsciiDoc -* ``html``: HTML - -Automated tools MAY render one or more of the listed formats as plain -text and MAY accept other markup formats beyond those listed. - -Example:: - - "description_format": "rst" + "license": "GPL version 3, excluding DRM provisions" Keywords @@ -661,40 +808,6 @@ "keywords": ["comfy", "chair", "cushions", "too silly", "monty python"] -License -------- - -A string indicating the license covering the distribution where the license -is not a simple selection from the "License" Trove classifiers. See -Classifiers" below. This field may also be used to specify a -particular version of a license which is named via the ``Classifier`` -field, or to indicate a variation or exception to such a license. - -Example:: - - "license": "GPL version 3, excluding DRM provisions" - - -License URL ------------ - -A specific URL referencing the full licence text for this version of the -distribution. - -Example:: - - "license_url": "https://github.com/pypa/pip/blob/1.3.1/LICENSE.txt" - -.. note:: - - Like Version URL, this is handled separately from the project URLs - as it is important that it remain accurate for this *specific* - version of the distribution, even if the project later switches to a - different license. - - The project URLs field is intended for more stable references. 
- - Classifiers ----------- @@ -704,15 +817,64 @@ Example:: "classifiers": [ - "Development Status :: 4 - Beta", - "Environment :: Console (Text Based)" + "Development Status :: 4 - Beta", + "Environment :: Console (Text Based)", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)" ] -Contact metadata -================ - -Contact metadata for a distribution is provided to allow users to get +Document names +-------------- + +Filenames for supporting documents included in the distribution's +``dist-info`` metadata directory. + +The following supporting documents can be named: + +* ``description``: a file containing a long description of the distribution +* ``license``: a file with the full text of the distribution's license +* ``changelog``: a file describing changes made to the distribution + +Supporting documents MUST be included directly in the ``dist-info`` +directory. Directory separators are NOT permitted in document names. + +The markup format (if any) for the file is indicated by the file extension. +This allows index servers and other automated tools to render included +text documents correctly and provide feedback on rendering errors, rather +than having to guess the intended format. + +If the filename has no extension, or the extension is not recognised, the +default rendering format MUST be plain text. + +The following markup renderers SHOULD be used for the specified file +extensions: + +* Plain text: ``.txt``, no extension, unknown extension +* reStructured Text: ``.rst`` +* Markdown: ``.md`` +* AsciiDoc: ``.adoc``, ``.asc``, ``.asciidoc`` +* HTML: ``.html``, ``.htm`` + +Automated tools MAY render one or more of the specified formats as plain +text and MAY render other markup formats beyond those listed. + +Automated tools SHOULD NOT make any assumptions regarding the maximum length +of supporting document content, except as necessary to protect the +integrity of a service. + +Example:: + + "document_names": { + "description": "README.rst", + "license": "LICENSE.rst", + "changelog": "NEWS" + } + + +Contributor metadata +==================== + +Contributor metadata for a distribution is provided to allow users to get access to more information about the distribution and its maintainers. These details are recorded as mappings with the following subfields: @@ -720,42 +882,36 @@ * ``name``: the name of an individual or group * ``email``: an email address (this may be a mailing list) * ``url``: a URL (such as a profile page on a source code hosting service) -* ``type``: one of ``"author"``, ``"maintainer"``, ``"organization"`` - or ``"individual"`` +* ``role``: one of ``"author"``, ``"maintainer"`` or ``"contributor"`` The ``name`` subfield is required, the other subfields are optional. -If no specific contact type is stated, the default is ``individual``. - -The different contact types are as follows: +If no specific role is stated, the default is ``contributor``. + +Email addresses must be in the form ``local-part at domain`` where the +local-part may be up to 64 characters long and the entire email address +contains no more than 254 characters. The formal specification of the +format is in RFC 5322 (sections 3.2.3 and 3.4.1) and RFC 5321, with a more +readable form given in the informational RFC 3696 and the associated errata. 
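A minimal sketch of the length checks described above (full RFC 5322 validation
is considerably more involved and is not attempted here)::

    def check_contact_email(address):
        # Applies only the stated length limits, not full RFC 5322 parsing.
        local_part, sep, domain = address.rpartition("@")
        if not sep or not local_part or not domain:
            raise ValueError("expected an address of the form local-part@domain")
        if len(local_part) > 64:
            raise ValueError("local part exceeds 64 characters")
        if len(address) > 254:
            raise ValueError("address exceeds 254 characters")

    check_contact_email("distutils-sig@python.org")   # passes silently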
+ +The defined contributor roles are as follows: * ``author``: the original creator of a distribution * ``maintainer``: the current lead contributor for a distribution, when they are not the original creator -* ``individual``: any other individuals involved in the creation of the - distribution -* ``organization``: indicates that these contact details are for an - organization (formal or informal) rather than for a specific individual - -.. note:: - - This is admittedly a little complicated, but it's designed to replace the - Author, Author-Email, Maintainer, Maintainer-Email fields from metadata - 1.2 in a way that allows those distinctions to be fully represented for - lossless translation, while allowing future distributions to pretty - much ignore everything other than the contact/contributor distinction - if they so choose. - -Contact metadata is optional. Automated tools MUST operate correctly if -a distribution does not provide them, including failing cleanly when an -operation depending on one of these fields is requested. +* ``contributor``: any other individuals or organizations involved in the + creation of the distribution + +Contact and contributor metadata is optional. Automated tools MUST operate +correctly if a distribution does not provide it, including failing cleanly +when an operation depending on one of these fields is requested. Contacts -------- -A list of contact entries giving the recommended contact points for getting -more information about the project. +A list of contributor entries giving the recommended contact points for +getting more information about the project. The example below would be suitable for a project that was in the process of handing over from the original author to a new lead maintainer, while @@ -766,18 +922,17 @@ "contacts": [ { "name": "Python Packaging Authority/Distutils-SIG", - "type": "organization", "email": "distutils-sig at python.org", "url": "https://bitbucket.org/pypa/" }, { "name": "Samantha C.", - "type": "maintainer", + "role": "maintainer", "email": "dontblameme at example.org" }, { "name": "Charlotte C.", - "type": "author", + "role": "author", "email": "iambecomingasketchcomedian at example.com" } ] @@ -786,19 +941,19 @@ Contributors ------------ -A list of contact entries for other contributors not already listed as +A list of contributor entries for other contributors not already listed as current project points of contact. The subfields within the list elements are the same as those for the main contact field. 
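As an illustration of how a consuming tool might use these two fields together,
the sketch below gathers the declared maintainers, relying on the default role
of ``contributor`` for entries that omit the ``role`` subfield (the helper
names are hypothetical)::

    def iter_people(metadata):
        # Walk both the "contacts" and "contributors" lists, either of which
        # may be absent from a given metadata mapping.
        for field in ("contacts", "contributors"):
            for entry in metadata.get(field, []):
                yield entry

    def maintainers(metadata):
        return [person["name"] for person in iter_people(metadata)
                if person.get("role", "contributor") == "maintainer"]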
Example:: "contributors": [ - {"name": "John C."}, - {"name": "Erik I."}, - {"name": "Terry G."}, - {"name": "Mike P."}, - {"name": "Graeme C."}, - {"name": "Terry J."} + {"name": "John C."}, + {"name": "Erik I."}, + {"name": "Terry G."}, + {"name": "Mike P."}, + {"name": "Graeme C."}, + {"name": "Terry J."} ] @@ -819,55 +974,115 @@ Example:: "project_urls": { - "Documentation": "https://distlib.readthedocs.org" - "Home": "https://bitbucket.org/pypa/distlib" - "Source": "https://bitbucket.org/pypa/distlib/src" - "Tracker": "https://bitbucket.org/pypa/distlib/issues" + "Documentation": "https://distlib.readthedocs.org" + "Home": "https://bitbucket.org/pypa/distlib" + "Repository": "https://bitbucket.org/pypa/distlib/src" + "Tracker": "https://bitbucket.org/pypa/distlib/issues" } -Dependency metadata -=================== +Semantic dependencies +===================== Dependency metadata allows distributions to make use of functionality provided by other distributions, without needing to bundle copies of those distributions. +Semantic dependencies allow publishers to indicate not only which other +distributions are needed, but also *why* they're needed. This additional +information allows integrators to install just the dependencies they need +for specific activities, making it easier to minimise installation +footprints in constrained environments (regardless of the reasons for +those constraints). + +Distributions may declare five differents kinds of dependency: + +* "Meta" dependencies: subdistributions that are grouped together into a + single larger metadistribution for ease of reference and installation. +* Runtime dependencies: other distributions that are needed to actually use + this distribution (but are not considered subdistributions). +* Test dependencies: other distributions that are needed to run the + automated test suite for this distribution (but are not needed just to + use it). +* Build dependencies: other distributions that are needed to build this + distribution. +* Development dependencies: other distributions that are needed when + working on this distribution (but do not fit into one of the other + dependency categories). + +Within each of these categories, distributions may also declare "Extras". +Extras are dependencies that may be needed for some optional functionality, +or which are otherwise complementary to the distribution. + Dependency management is heavily dependent on the version identification and specification scheme defined in PEP 440. -.. note:: - - This substantially changes the old two-phase setup vs runtime dependency - model in metadata 1.2 (which was in turn derived from the setuptools - dependency parameters). The translation is that ``dev_requires`` and - ``build_requires`` both map to ``Setup-Requires-Dist`` - in 1.2, while ``requires`` maps to ``Requires-Dist``. To go the other - way, ``Setup-Requires-Dist`` maps to ``build_requires`` and - ``Requires-Dist`` maps to ``requires``. - All of these fields are optional. Automated tools MUST operate correctly if a distribution does not provide them, by assuming that a missing field indicates "Not applicable for this distribution". -Dependency specifications -------------------------- - -Individual dependencies are typically defined as strings containing a -distribution name (as found in the ``name`` field). 
The dependency name +Dependency specifiers +--------------------- + +While many dependencies will be needed to use a distribution at all, others +are needed only on particular platforms or only when particular optional +features of the distribution are needed. To handle this, dependency +specifiers are mappings with the following subfields: + +* ``requires``: a list of `requirement specifiers + `__ needed to satisfy the dependency +* ``extra``: the name of a set of optional dependencies that are requested + and installed together. See `Extras (optional dependencies)`_ for details. +* ``environment``: an environment marker defining the environment that + needs these dependencies. See `Environment markers`_ for details. + +``requires`` is the only required subfield. When it is the only subfield, the +dependencies are said to be *unconditional*. If ``extra`` or ``environment`` +is specified, then the dependencies are *conditional*. + +All three fields may be supplied, indicating that the dependencies are +needed only when the named extra is requested in a particular environment. + +Automated tools MUST combine related dependency specifiers (those with +common values for ``extra`` and ``environment``) into a single specifier +listing multiple requirements when serialising metadata or +passing it to an install hook. + +Despite this required normalisation, the same extra name or environment +marker MAY appear in multiple conditional dependencies. This may happen, +for example, if an extra itself only needs some of its dependencies in +specific environments. It is only the combination of extras and environment +markers that is required to be unique in a list of dependency specifiers. + +Any extras referenced from a dependency specifier MUST be named in the +`Extras`_ field for this distribution. This helps avoid typographical +errors and also makes it straightforward to identify the available extras +without scanning the full set of dependencies. + + +Requirement specifiers +---------------------- + +Individual requirements are defined as strings containing a distribution +name (as found in the ``name`` field). The distribution name may be followed by an extras specifier (enclosed in square -brackets) and by a version specification (within parentheses). +brackets) and by a version specifier or direct reference (within +parentheses). + +Whitespace is permitted between the distribution name and an opening +square bracket or parenthesis. Whitespace is also permitted between a +closing square bracket and an opening parenthesis. See `Extras (optional dependencies)`_ for details on extras and PEP 440 -for details on version specifiers. +for details on version specifiers and direct references. The distribution names should correspond to names as found on the `Python Package Index`_; while these names are often the same as the module names as accessed with ``import x``, this is not always the case (especially for distributions that provide multiple top level modules or packages). -Example dependency specifications:: +Example requirement specifiers:: "Flask" "Django" @@ -877,43 +1092,6 @@ "ComfyChair[warmup] (> 0.1)" -Conditional dependencies ------------------------- - -While many dependencies will be needed to use a distribution at all, others -are needed only on particular platforms or only when particular optional -features of the distribution are needed. 
To enable this, dependency fields -are marked as either unconditional (indicated by ``requires`` in the field -name) or conditional (indicated by ``may_require``) in the field name. - -Unconditional dependency fields are lists of dependency specifications, with -each entry indicated a required dependency. - -Conditional dependencies are lists of mappings with the following fields: - -* ``dependencies``: a list of relevant dependency specifications -* ``extra``: the name of a set of optional dependencies that are requested - and installed together. See `Extras (optional dependencies)`_ for details. -* ``environment``: an environment marker defining the environment that - needs these dependencies. See `Environment markers`_ for details. - -The ``dependencies`` field is required, as is at least one of ``extra`` and -``environment``. All three fields may be supplied, indicating that the -dependency is needed only when that particular set of additional -dependencies is requested in a particular environment. - -Note that the same extras and environment markers MAY appear in multiple -conditional dependencies. This may happen, for example, if an extra itself -only needs some of its dependencies in specific environments. - -.. note:: - - Technically, you could store the conditional and unconditional - dependencies in a single list and switch based on the entry type - (string or mapping), but the ``*requires`` vs ``*may-require`` two - list design seems easier to understand and work with. - - Mapping dependencies to development and distribution activities --------------------------------------------------------------- @@ -921,207 +1099,176 @@ and development activities identified above, and govern which dependencies should be installed for the specified activities: -* Deployment dependencies: - - * ``requires`` - * ``may_require`` - * Request the ``test`` extra to also install - +* Implied runtime dependencies: + + * ``meta_requires`` + * ``run_requires`` + +* Implied build dependencies: + + * ``build_requires`` + * If running the distribution's test suite as part of the build process, + request the ``:meta:``, ``:run:`` and ``:test:`` extras to also + install: + + * ``meta_requires`` + * ``run_requires`` * ``test_requires`` - * ``test_may_require`` - -* Build dependencies: - + +* Implied development and publication dependencies: + + * ``meta_requires`` + * ``run_requires`` * ``build_requires`` - * ``build_may_require`` - -* Development dependencies: - - * ``requires`` - * ``may_require`` - * ``build_requires`` - * ``build_may_require`` * ``test_requires`` - * ``test_may_require`` * ``dev_requires`` - * ``dev_may_require`` - -To ease compatibility with existing two phase setup/deployment toolchains, -installation tools MAY treat ``dev_requires`` and ``dev_may_require`` as -additions to ``build_requires`` and ``build_may_require`` rather than -as separate fields. - -Installation tools SHOULD allow users to request at least the following -operations for a named distribution: - -* Install the distribution and any deployment dependencies. -* Install just the build dependencies without installing the distribution -* Install just the development dependencies without installing - the distribution - -The notation described in `Extras (optional dependencies)`_ SHOULD be used to -request additional optional dependencies when installing deployment -or build dependencies. 
- -Installation tools SHOULD report an error if dependencies cannot be found, -MUST at least emit a warning, and MAY allow the user to force the -installation to proceed regardless. - -.. note:: - - As an example of mapping this to Linux distro packages, assume an - example project without any extras defined is split into 2 RPMs - in a SPEC file: example and example-devel - - The ``requires`` and applicable ``may_require`` dependencies would be - mapped to the Requires dependencies for the "example" RPM (a mapping from - environment markers to SPEC file conditions would also allow those to - be handled correctly) - - The ``build_requires`` and ``build_may_require`` dependencies would be - mapped to the BuildRequires dependencies for the "example" RPM. - - All defined dependencies relevant to Linux, including those in - ``dev_requires`` and ``test_requires``, would become Requires - dependencies for the "example-devel" RPM. - - If a project defines any extras, those would be mapped to additional - virtual RPMs with appropriate BuildRequires and Requires entries based - on the details of the dependency specifications. - - A documentation toolchain dependency like Sphinx would either go in - ``build_requires`` (for example, if man pages were included in the - built distribution) or in ``dev_requires`` (for example, if the - documentation is published solely through ReadTheDocs or the - project website). This would be enough to allow an automated converter - to map it to an appropriate dependency in the spec file. - - -Requires --------- - -A list of other distributions needed when this distribution is deployed. - -Example:: - - "requires": ["SciPy", "PasteDeploy", "zope.interface (>3.5.0)"] + +The notation described in `Extras (optional dependencies)`_ SHOULD be used +to determine exactly what gets installed for various operations. + +Installation tools SHOULD report an error if dependencies cannot be +satisfied, MUST at least emit a warning, and MAY allow the user to force +the installation to proceed regardless. + +See Appendix B for an overview of mapping these dependencies to an RPM +spec file. Extras ------ A list of optional sets of dependencies that may be used to define -conditional dependencies in ``"may_require"`` and similar fields. See +conditional dependencies in dependency fields. See `Extras (optional dependencies)`_ for details. -The extra name``"test"`` is reserved for requesting the dependencies -specified in ``test_requires`` and ``test_may_require`` and is NOT -permitted in this field. +The names of extras MUST abide by the same restrictions as those for +distribution names. Example:: "extras": ["warmup"] -May require ------------ - -A list of other distributions that may be needed when this distribution -is deployed, based on the extras requested and the target deployment -environment. - -Any extras referenced from this field MUST be named in the `Extras`_ field. +Meta requires +------------- + +An abbreviation of "metadistribution requires". This is a list of +subdistributions that can easily be installed and used together by +depending on this metadistribution. + +In this field, automated tools: + +* MUST allow strict version matching +* MUST NOT allow more permissive version specifiers. +* MAY allow direct references + +Public index servers SHOULD NOT allow the use of direct references in +uploaded distributions. Direct references are intended primarily as a +tool for software integrators rather than publishers. 
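A rough sketch of the kind of check an index server or installation tool might
apply to this field. The clause handling below is deliberately naive and only
illustrates the intent of the rules above::

    def check_meta_requires(dependency_specifiers):
        # Reject any parenthesised clause that is not a strict "==" match.
        for specifier in dependency_specifiers:
            for requirement in specifier.get("requires", []):
                clause = requirement.partition("(")[2].rstrip(")").strip()
                if clause and not clause.startswith("=="):
                    raise ValueError("meta_requires entries must pin an exact "
                                     "version: %r" % requirement)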
+ +Distributions that rely on direct references to platform specific binary +archives SHOULD define appropriate constraints in their +``supports_environments`` field. Example:: - "may_require": [ - { - "dependencies": ["pywin32 (>1.0)"], - "environment": "sys.platform == 'win32'" - }, - { - "dependencies": ["SoftCushions"], - "extra": "warmup" - } - ] + "meta_requires": + { + "requires": ["ComfyUpholstery (== 1.0a2)", + "ComfySeatCushion (== 1.0a2)"] + }, + { + "requires": ["CupOfTeaAtEleven (== 1.0a2)"], + "environment": "'linux' in sys.platform" + } + ] + + +Run requires +------------ + +A list of other distributions needed to actually run this distribution. + +Automated tools MUST NOT allow strict version matching clauses or direct +references in this field - if permitted at all, such clauses should appear +in ``meta_requires`` instead. + +Example:: + + "run_requires": + { + "requires": ["SciPy", "PasteDeploy", "zope.interface (>3.5.0)"] + }, + { + "requires": ["pywin32 (>1.0)"], + "environment": "sys.platform == 'win32'" + }, + { + "requires": ["SoftCushions"], + "extra": "warmup" + } + ] + Test requires ------------- A list of other distributions needed in order to run the automated tests -for this distribution, either during development or when running the -``test_installed_dist`` metabuild when deployed. +for this distribution.. + +Automated tools MAY disallow strict version matching clauses and direct +references in this field and SHOULD at least emit a warning for such clauses. + +Public index servers SHOULD NOT allow strict version matching clauses or +direct references in this field. Example:: - "test_requires": ["unittest2"] - - -Test may require ----------------- - -A list of other distributions that may be needed in order to run the -automated tests for this distribution, either during development or when -running the ``test_installed_dist`` metabuild when deployed, based on the -extras requested and the target deployment environment. - -Any extras referenced from this field MUST be named in the `Extras`_ field. - -Example:: - - "test_may_require": [ - { - "dependencies": ["pywin32 (>1.0)"], - "environment": "sys.platform == 'win32'" - }, - { - "dependencies": ["CompressPadding"], - "extra": "warmup" - } - ] + "test_requires": + { + "requires": ["unittest2"] + }, + { + "requires": ["pywin32 (>1.0)"], + "environment": "sys.platform == 'win32'" + }, + { + "requires": ["CompressPadding"], + "extra": "warmup" + } + ] Build requires -------------- A list of other distributions needed when this distribution is being built -(creating a binary archive from a source archive). +(creating a binary archive from an sdist, source archive or VCS checkout). Note that while these are build dependencies for the distribution being built, the installation is a *deployment* scenario for the dependencies. +Automated tools MAY disallow strict version matching clauses and direct +references in this field and SHOULD at least emit a warning for such clauses. + +Public index servers SHOULD NOT allow strict version matching clauses or +direct references in this field. + Example:: - "build_requires": ["setuptools (>= 0.7)"] - - -Build may require ------------------ - -A list of other distributions that may be needed when this distribution -is built (creating a binary archive from a source archive), based on the -features requested and the build environment. - -Note that while these are build dependencies for the distribution being -built, the installation is a *deployment* scenario for the dependencies. 
- -Any extras referenced from this field MUST be named in the `Extras`_ field. - -Automated tools MAY assume that all extras are implicitly requested when -installing build dependencies. - -Example:: - - "build_may_require": [ - { - "dependencies": ["pywin32 (>1.0)"], - "environment": "sys.platform == 'win32'" - }, - { - "dependencies": ["cython"], - "extra": "c-accelerators" - } - ] + "build_requires": + { + "requires": ["setuptools (>= 0.7)"] + }, + { + "requires": ["pywin32 (>1.0)"], + "environment": "sys.platform == 'win32'" + }, + { + "requires": ["cython"], + "extra": "c-accelerators" + } + ] Dev requires @@ -1133,42 +1280,27 @@ Additional dependencies that may be listed in this field include: -* tools needed to create a source archive +* tools needed to create an sdist from a source archive or VCS checkout * tools needed to generate project documentation that is published online rather than distributed along with the rest of the software -* additional test dependencies for tests which are not executed when the - test is invoked through the ``test_installed_dist`` metabuild hook (for - example, tests that require a local database server and web server and - may not work when fully installed on a production system) + +Automated tools MAY disallow strict version matching clauses and direct +references in this field and SHOULD at least emit a warning for such clauses. + +Public index servers SHOULD NOT allow strict version matching clauses or +direct references in this field. Example:: - "dev_requires": ["hgtools", "sphinx (>= 1.0)"] - - -Dev may require ---------------- - -A list of other distributions that may be needed during development of -this distribution, based on the features requested and the build environment. - -This should only be needed if the project's own utility scripts have -platform specific dependencies that aren't already defined as deployment -or build dependencies. - -Any extras referenced from this field MUST be named in the `Extras`_ field. - -Automated tools MAY assume that all extras are implicitly requested when -installing development dependencies. - -Example:: - - "dev_may_require": [ - { - "dependencies": ["pywin32 (>1.0)"], - "environment": "sys.platform == 'win32'" - } - ] + "dev_requires": + { + "requires": ["hgtools", "sphinx (>= 1.0)"] + }, + { + "requires": ["pywin32 (>1.0)"], + "environment": "sys.platform == 'win32'" + } + ] Provides @@ -1190,7 +1322,17 @@ project is able to include a ``"provides": ["distribute"]`` entry to satisfy any projects that require the now obsolete distribution's name. -A distribution may also provide a "virtual" project name, which does +To avoid malicious hijacking of names, when interpreting metadata retrieved +from a public index server, automated tools MUST NOT pay any attention to +``"provides"`` entries that do not correspond to a published distribution. + +However, to appropriately handle project forks and mergers, automated tools +MUST accept ``"provides"`` entries that name other distributions when the +entry is retrieved from a local installation database or when there is a +corresponding ``"obsoleted_by"`` entry in the metadata for the named +distribution. + +A distribution may wish to depend on a "virtual" project name, which does not correspond to any separately distributed project: such a name might be used to indicate an abstract capability which could be supplied by one of multiple projects. 
For example, multiple projects might supply @@ -1198,13 +1340,20 @@ that it provides ``sqlalchemy-postgresql-bindings``, allowing other projects to depend only on having at least one of them installed. -A version declaration may be supplied and must follow the rules described -in PEP 440. The distribution's version identifier will be implied -if none is specified. +To handle this case in a way that doesn't allow for name hijacking, the +authors of the distribution that first defines the virtual dependency should +create a project on the public index server with the corresponding name, and +depend on the specific distribution that should be used if no other provider +is already installed. This also has the benefit of publishing the default +provider in a way that automated tools will understand. + +A version declaration may be supplied as part of an entry in the provides +field and must follow the rules described in PEP 440. The distribution's +version identifier will be implied if none is specified. Example:: - "provides": ["AnotherProject (3.4)", "virtual_package"] + "provides": ["AnotherProject (3.4)", "virtual-package"] Obsoleted by @@ -1250,63 +1399,253 @@ Individual entries are environment markers, as described in `Environment markers`_. -Installation tools SHOULD report an error if supported platforms are +Installation tools SHOULD report an error if supported environments are specified by the distribution and the current platform fails to match any of them, MUST at least emit a warning, and MAY allow the user to force the installation to proceed regardless. -Examples:: - +The two main uses of this field are to declare which versions of Python +and which underlying operating systems are supported. + +Examples indicating supported Python versions:: + + # Supports Python 2.6+ + "supports_environments": ["python_version >= '2.6'"] + + # Supports Python 2.6+ (for 2.x) or 3.3+ (for 3.x) + "supports_environments": ["python_version >= '3.3'", + "'3.0' > python_version >= '2.6'"] + +Examples indicating supported operating systems:: + + # Windows only "supports_environments": ["sys_platform == 'win32'"] + + # Anything except Windows "supports_environments": ["sys_platform != 'win32'"] + + # Linux or BSD only "supports_environments": ["'linux' in sys_platform", "'bsd' in sys_platform"] +Example where the supported Python version varies by platform:: + + # The standard library's os module has long supported atomic renaming + # on POSIX systems, but only gained atomic renaming on Windows in Python + # 3.3. A distribution that needs atomic renaming support for reliable + # operation might declare the following supported environments. + "supports_environments": ["python_version >= '2.6' and sys_platform != 'win32'", + "python_version >= '3.3' and sys_platform == 'win32'"] + +Installed interfaces +==================== + +Most Python distributions expose packages and modules for import through +the Python module namespace. Distributions may also expose other +interfaces when installed. + +Export specifiers +----------------- + +An export specifier is a string consisting of a fully qualified name, as +well as an optional extra name enclosed in square brackets. This gives the +following four possible forms for an export specifier:: + + module + module:name + module[requires_extra] + module:name[requires_extra] .. 
note:: - This field replaces the old Platform, Requires-Platform and - Requires-Python fields and has been redesigned with environment - marker based semantics that should make it possible to reliably flag, - for example, Unix specific or Windows specific distributions, as well - as Python 2 only and Python 3 only distributions. - - -Metabuild system -================ - -The ``metabuild_hooks`` field is used to define various operations that -may be invoked on a distribution in a platform independent manner. - -The metabuild system currently defines three operations as part of the -deployment of a distribution: + The jsonschema file currently restricts qualified names using the + Python 2 ASCII identifier rules. This may need to be reconsidered + given the more relaxed identifier rules in Python 3. + +The meaning of the subfields is as follows: + +* ``module``: the module providing the export +* ``name``: if applicable, the qualified name of the export within the module +* ``requires_extra``: indicates the export will only work correctly if the + additional dependencies named in the given extra are available in the + installed environment + +.. note:: + + I tried this as a mapping with subfields, and it made the examples below + unreadable. While this PEP is mostly for tool use, readability still + matters to some degree for debugging purposes, and because I expect + snippets of the format to be reused elsewhere. + + +Modules +------- + +A list of qualified names of modules and packages that the distribution +provides for import. + +.. note:: + + The jsonschema file currently restricts qualified names using the + Python 2 ASCII identifier rules. This may need to be reconsidered + given the more relaxed identifier rules in Python 3. + +For names that contain dots, the portion of the name before the final dot +MUST appear either in the installed module list or in the namespace package +list. + +To help avoid name conflicts, it is RECOMMENDED that distributions provide +a single top level module or package that matches the distribution name +(or a lower case equivalent). This requires that the distribution name also +meet the requirements of a Python identifier, which are stricter than +those for distribution names). This practice will also make it easier to +find authoritative sources for modules. + +Index servers SHOULD allow multiple distributions to publish the same +modules, but MAY notify distribution authors of potential conflicts. + +Installation tools SHOULD report an error when asked to install a +distribution that provides a module that is also provided by a different, +previously installed, distribution. + +Note that attempting to import some declared modules may result in an +exception if the appropriate extras are not installed. + +Example:: + + "modules": ["chair", "chair.cushions", "python_sketches.nobody_expects"] + +.. note:: + + Making this a list of export specifiers instead would allow a distribution + to declare when a particular module requires a particular extra in order + to run correctly. On the other hand, there's an argument to be made that + that is the point where it starts to become worthwhile to split out a + separate distribution rather than using extras. + + +Namespaces +---------- + +A list of qualified names of namespace packages that the distribution +contributes modules to. + +.. note:: + + The jsonschema file currently restricts qualified names using the + Python 2 ASCII identifier rules. 
This may need to be reconsidered + given the more relaxed identifier rules in Python 3. + +On versions of Python prior to Python 3.3 (which provides native namespace +package support), installation tools SHOULD emit a suitable ``__init__.py`` +file to properly initialise the namespace rather than using a distribution +provided file. + +Installation tools SHOULD emit a warning and MAY emit an error if a +distribution declares a namespace package that conflicts with the name of +an already installed module or vice-versa. + +Example:: + + "namespaces": ["python_sketches"] + + +Commands +-------- + +The ``commands`` mapping contains three subfields: + +* ``wrap_console``: console wrapper scripts to be generated by the installer +* ``wrap_gui``: GUI wrapper scripts to be generated by the installer +* ``prebuilt``: scripts created by the distribution's build process and + installed directly to the configured scripts directory + +``wrap_console`` and ``wrap_gui`` are both mappings of script names to +export specifiers. The script names must follow the same naming rules as +distribution names. + +The export specifiers for wrapper scripts must refer to either a package +with a __main__ submodule (if no ``name`` subfield is given in the export +specifier) or else to a callable inside the named module. + +Installation tools should generate appropriate wrappers as part of the +installation process. + +.. note:: + + Still needs more detail on what "appropriate wrappers" means. For now, + refer to what setuptools and zc.buildout generate as wrapper scripts. + +``prebuilt`` is a list of script paths, relative to the scripts directory in +a wheel file or following installation. They are provided for informational +purpose only - installing them is handled through the normal processes for +files created when building a distribution. + +Index servers SHOULD allow multiple distributions to publish the same +commands, but MAY notify distribution authors of potential conflicts. + +Installation tools SHOULD report an error when asked to install a +distribution that provides a command that is also provided by a different, +previously installed, distribution. + +Example:: + + "commands": { + "wrap_console": [{"wrapwithpython": "chair.run_cli"}], + "wrap_gui": [{"wrapwithpythonw": "chair:run_gui"}], + "prebuilt": ["notawrapper"] + } + + + +Exports +------- + +The ``exports`` field is a mapping containing qualified names as keys. Each +key identifies an export group containing one or more exports published by +the distribution. + +Export group names are defined by distributions that will then make use of +the published export information in some way. The primary use case is for +distributions that support a plugin model: defining an export group allows +other distributions to indicate which plugins they provide, how they +can be imported and accessed, and which additional dependencies (if any) +are needed for the plugin to work correctly. + +To reduce the chance of name conflicts, export group names SHOULD use a +prefix that corresponds to a module name in the distribution that defines +the meaning of the export group. This practice will also make it easier to +find authoritative documentation for export groups. + +Each individual export group is then a mapping of arbitrary non-empty string +keys to export specifiers. The meaning of export names within an export +group is up to the distribution that defines the export group. 
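For example, a plugin host might resolve an entry from one of its export groups
roughly as follows. The helper is purely illustrative, and error handling
(including missing extras) is omitted::

    import importlib
    import re

    _EXPORT = re.compile(r"^(?P<module>[\w.]+)(?::(?P<name>[\w.]+))?"
                         r"(?:\[(?P<extra>[^\]]+)\])?$")

    def resolve_export(specifier):
        # Import the exporting module, then walk the optional qualified name.
        fields = _EXPORT.match(specifier).groupdict()
        target = importlib.import_module(fields["module"])
        if fields["name"]:
            for attr in fields["name"].split("."):
                target = getattr(target, attr)
        return target

    # e.g. an entry published under a hypothetical "ComfyChair.plugins" group:
    # resolve_export("chair.cushions:FluffyCushion[warmup]")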
Creating an +appropriate definition for the export name format can allow the importing +distribution to determine whether or not an export is relevant without +needing to import every exporting module. + + +Install hooks +============= + +The ``install_hooks`` field is used to define operations to be +invoked on the distribution in the following situations: * Installing to a deployment system * Uninstalling from a deployment system -* Running the distribution's test suite on a deployment system (hence the - ``test`` runtime extra) - -Distributions may define handles for each of these operations as an -"entry point", a reference to a Python callable, with the module name -separated from the reference within the module by a colon (``:``). - -Example metabuild hooks:: - - "metabuild_hooks": { - "postinstall": "myproject.build_hooks:postinstall", - "preuininstall": "myproject.build_hooks:preuninstall", - "test_installed_dist": "some_test_harness.metabuild_hook" + +Distributions may define handlers for each of these operations as an +"entry point", which is a reference to a Python callable, with the module +name separated from the reference within the module by a colon (``:``). + +Example install hooks:: + + "install_hooks": { + "postinstall": "ComfyChair.install_hooks:postinstall", + "preuininstall": "ComfyChair.install_hooks:preuninstall" } -Build and installation tools MAY offer additional operations beyond the -core metabuild operations. These operations SHOULD be composed from the -defined metabuild operations where appropriate. - -Build and installation tools SHOULD support the legacy ``setup.py`` based -commands for metabuild operations not yet defined as metabuild hooks. - -The metabuild hooks are gathered together into a single top level -``metabuild_hooks`` field. The individual hooks are: +The currently defined install hooks are: * ``postinstall``: run after the distribution has been installed to a target deployment system (or after it has been upgraded). If the hook is @@ -1316,18 +1655,15 @@ deployment system (or before it is upgraded). If the hook is not defined, it indicates no distribution specific actions are needed prior to uninstallation. -* ``test_installed_dist``: test an installed distribution is working. If the - hook is not defined, it indicates the distribution does not support - execution of the test suite after deployment. - -The expected signatures of these hooks are as follows:: + +The required signatures of these hooks are as follows:: def postinstall(current_meta, previous_meta=None): """Run following installation or upgrade of the distribution *current_meta* is the distribution metadata for the version now installed on the current system - *previous_meta* is either missing or ``None`` (indicating a fresh + *previous_meta* is either omitted or ``None`` (indicating a fresh install) or else the distribution metadata for the version that was previously installed (indicating an upgrade or downgrade). """ @@ -1337,61 +1673,111 @@ *current_meta* is the distribution metadata for the version now installed on the current system - *next_meta* is either missing or ``None`` (indicating complete + *next_meta* is either omitted or ``None`` (indicating complete uninstallation) or else the distribution metadata for the version that is about to be installed (indicating an upgrade or downgrade). 
""" - def test_installed_dist(current_meta): - """Check an installed distribution is working correctly - - Note that this check should always be non-destructive as it may be - invoked automatically by some tools. - - Requires that the distribution's test dependencies be installed - (indicated by the ``test`` runtime extra). - - Returns ``True`` if the check passes, ``False`` otherwise. - """ - -Metabuild hooks MUST be called with at least abbreviated metadata, and MAY -be called with full metadata. - -Where necessary, metabuild hooks check for the presence or absence of -optional dependencies defined as extras using the same techniques used -during normal operation of the distribution (for example, checking for -import failures for optional dependencies). +When install hooks are defined, it is assumed that they MUST be executed +to obtain a properly working installation of the distribution, and to +properly remove the distribution from a system. + +Install hooks SHOULD NOT be used to provide functionality that is +expected to be provided by installation tools (such as rewriting of +shebang lines and generation of executable wrappers for Windows). + +Installation tools MUST ensure the distribution is fully installed, and +available through the import system and installation database when invoking +install hooks. + +Installation tools MUST call install hooks with full metadata, rather than +only the essential dependency resolution metadata. + +The given parameter names are considered part of the hook signature. +Installation tools MUST call install hooks solely with keyword arguments. +Install hook implementations MUST use the given parameter names. + +Installation tools SHOULD invoke install hooks automatically after +installing a distribution from a binary archive. + +When installing from an sdist, source archive or VCS checkout, installation +tools SHOULD create a binary archive using ``setup.py bdist_wheel`` and +then install binary archive normally (including invocation of any install +hooks). Installation tools SHOULD NOT invoke ``setup.py install`` directly. + +Installation tools SHOULD treat an exception thrown by a postinstall hook +as a failure of the installation and revert any other changes made to the +system. + +Installation tools SHOULD treat an exception thrown by a preuninstall hook +as an indication the removal of the distribution should be aborted. + +Installation tools MUST NOT silently ignore install hooks, as failing +to call these hooks may result in a misconfigured installation that fails +unexpectedly at runtime. Installation tools MAY refuse to install +distributions that define install hooks, or require that users +explicitly opt in to permitting the execution of such hooks. + +Install hook implementations MUST NOT make any assumptions regarding the +current working directory when they are invoked, and MUST NOT make +persistent alterations to the working directory or any other process global +state (other than potentially importing additional modules, or other +expected side effects of running the distribution). + +Install hooks have access to the full metadata for the release being +installed, that of the previous/next release (as appropriate), as well as +to all the normal runtime information (such as available imports). Hook +implementations can use this information to perform additional platform +specific installation steps. 
To check for the presence or absence of +"extras", hook implementations should use the same runtime checks that +would be used during normal operation (such as checking for the availability +of the relevant dependencies). Metadata Extensions =================== Extensions to the metadata may be present in a mapping under the -'extensions' key. The keys must meet the same restrictions as -distribution names, while the values may be any type natively supported -in JSON:: +'extensions' key. The keys must be valid qualified names, while +the values may be any type natively supported in JSON:: "extensions" : { - "chili" : { "type" : "Poblano", "heat" : "Mild" }, - "languages" : [ "French", "Italian", "Hebrew" ] + "chili" : { "type" : "Poblano", "heat" : "Mild" }, + "languages" : [ "French", "Italian", "Hebrew" ] } -To avoid name conflicts, it is recommended that distribution names be used -to identify metadata extensions. This practice will also make it easier to +Extension names are defined by distributions that will then make use of +the additional published metadata in some way. + +To reduce the chance of name conflicts, extension names SHOULD use a +prefix that corresponds to a module name in the distribution that defines +the meaning of the extension. This practice will also make it easier to find authoritative documentation for metadata extensions. +Metadata extensions allow development tools to record information in the +metadata that may be useful during later phases of distribution. For +example, a build tool could include default build options in a metadata +extension when creating an sdist, and use those when creating the wheel +files later. + Extras (optional dependencies) ============================== Extras are additional dependencies that enable an optional aspect -of the distribution, generally corresponding to a ``try: import +of the distribution, often corresponding to a ``try: import optional_dependency ...`` block in the code. To support the use of the distribution with or without the optional dependencies they are listed separately from the distribution's core dependencies and must be requested explicitly, either in the dependency specifications of another distribution, or else when issuing a command to an installation tool. +Note that installation of extras is not tracked directly by installation +tools: extras are merely a convenient way to indicate a set of dependencies +that is needed to provide some optional functionality of the distribution. +If selective *installation* of components is desired, then multiple +distributions must be defined rather than relying on the extras system. + The names of extras MUST abide by the same restrictions as those for distribution names. @@ -1399,15 +1785,15 @@ "name": "ComfyChair", "extras": ["warmup", "c-accelerators"] - "may_require": [ + "run_requires": [ { - "dependencies": ["SoftCushions"], + "requires": ["SoftCushions"], "extra": "warmup" } ] - "build_may_require": [ + "build_requires": [ { - "dependencies": ["cython"], + "requires": ["cython"], "extra": "c-accelerators" } ] @@ -1416,15 +1802,34 @@ relevant extra names inside square brackets after the distribution name when specifying the dependency. -Extra specifications MUST support the following additional syntax: - -* Multiple features can be requested by separating them with a comma within +Extra specifications MUST allow the following additional syntax: + +* Multiple extras can be requested by separating them with a comma within the brackets. 
-* All explicitly defined extras may be requested with the ``*`` wildcard - character. Note that this does NOT request the implicitly defined - ``test`` extra - that must always be requested explicitly when it is - desired. -* Extras may be explicitly excluded by prefixing their name with a hyphen. + +* The following special extras request processing of the corresponding + lists of dependencies: + + * ``:meta:``: ``meta_requires`` + * ``:run:``: ``run_requires`` + * ``:test:``: ``test_requires`` + * ``:build:``: ``build_requires`` + * ``:dev:``: ``dev_requires`` + * ``:*:``: process *all* dependency lists + +* The ``*`` character as an extra is a wild card that enables all of the + entries defined in the distribution's ``extras`` field. + +* Extras may be explicitly excluded by prefixing their name with a ``-`` + character (this is useful in conjunction with ``*`` to exclude only + particular extras that are definitely not wanted, while enabling all + others). + +* The ``-`` character as an extra specification indicates that the + distribution itself should NOT be installed, and also disables the + normally implied processing of ``:meta:`` and ``:run:`` dependencies + (those may still be requested explicitly using the appropriate extra + specifications). Command line based installation tools SHOULD support this same syntax to allow extras to be requested explicitly. @@ -1432,15 +1837,32 @@ The full set of dependency requirements is then based on the top level dependencies, along with those of any requested extras. -Example:: +Dependency examples (showing just the ``requires`` subfield):: "requires": ["ComfyChair[warmup]"] - -> requires ``ComfyChair`` and ``SoftCushions`` at run time + -> requires ``ComfyChair`` and ``SoftCushions`` "requires": ["ComfyChair[*]"] - -> requires ``ComfyChair`` and ``SoftCushions`` at run time, but - will also pick up any new optional dependencies other than those - needed solely to run the tests + -> requires ``ComfyChair`` and ``SoftCushions``, but will also + pick up any new extras defined in later versions + +Command line examples:: + + pip install ComfyChair + -> installs ComfyChair with applicable :meta: and :run: dependencies + + pip install ComfyChair[*] + -> as above, but also installs all extra dependencies + + pip install ComfyChair[-,:build:,*] + -> installs just the build dependencies with all extras + + pip install ComfyChair[-,:build:,:run:,:meta:,:test:,*] + -> as above, but also installs dependencies needed to run the tests + + pip install ComfyChair[-,:*:,*] + -> installs the full set of development dependencies, but avoids + installing ComfyChair itself Environment markers @@ -1463,15 +1885,15 @@ requires PyWin32 both at runtime and buildtime when using Windows:: "name": "ComfyChair", - "may_require": [ + "run_requires": [ { - "dependencies": ["pywin32 (>1.0)"], + "requires": ["pywin32 (>1.0)"], "environment": "sys.platform == 'win32'" } ] - "build_may_require": [ + "build_requires": [ { - "dependencies": ["pywin32 (>1.0)"], + "requires": ["pywin32 (>1.0)"], "environment": "sys.platform == 'win32'" } ] @@ -1484,7 +1906,8 @@ The pseudo-grammar is :: MARKER: EXPR [(and|or) EXPR]* - EXPR: ("(" MARKER ")") | (SUBEXPR [(in|==|!=|not in)?SUBEXPR]) + EXPR: ("(" MARKER ")") | (SUBEXPR [CMPOP?SUBEXPR]) + CMPOP: (==|!=|<|>|<=|>=|in|not in) where ``SUBEXPR`` is either a Python string (such as ``'2.4'``, or ``'win32'``) or one of the following marker variables: @@ -1493,29 +1916,42 @@ * ``python_full_version``: see definition below * ``os_name````: 
``os.name`` * ``sys_platform````: ``sys.platform`` +* ``platform_release``: ``platform.release()`` * ``platform_version``: ``platform.version()`` * ``platform_machine``: ``platform.machine()`` * ``platform_python_implementation``: ``platform.python_implementation()`` +* ``implementation_name````: ``sys.implementation.name`` +* ``implementation_version````: see definition below + +If a particular value is not available (such as the ``sys.implementation`` +subattributes in versions of Python prior to 3.3), the corresponding marker +variable MUST be considered equivalent to the empty string. Note that all subexpressions are restricted to strings or one of the -marker variable names, meaning that it is not possible to use other -sequences like tuples or lists on the right side of the ``in`` and -``not in`` operators. - -Unlike Python, chaining of comparison operations is NOT permitted in -environment markers. - -The ``python_full_version`` marker variable is derived from -``sys.version_info()`` in accordance with the following algorithm:: - - def format_full_version(): - info = sys.version_info +marker variable names (which refer to string values), meaning that it is +not possible to use other sequences like tuples or lists on the right +side of the ``in`` and ``not in`` operators. + +Chaining of comparison operations is permitted using the normal Python +semantics of an implied ``and``. + +The ``python_full_version`` and ``implementation_version`` marker variables +are derived from ``sys.version_info()`` and ``sys.implementation.version`` +respectively, in accordance with the following algorithm:: + + def format_full_version(info): version = '{0.major}.{0.minor}.{0.micro}'.format(info) kind = info.releaselevel if kind != 'final': version += kind[0] + str(info.serial) return version + python_full_version = format_full_version(sys.version_info) + implementation_version = format_full_version(sys.implementation.version) + +``python_full_version`` will typically correspond to the leading segment +of ``sys.version()``. + Updating the metadata specification =================================== @@ -1523,13 +1959,80 @@ The metadata specification may be updated with clarifications without requiring a new PEP or a change to the metadata version. -Adding new features (other than through the extension mechanism), or -changing the meaning of existing fields, requires a new metadata version -defined in a new PEP. - - -Summary of differences from \PEP 345 -==================================== +Changing the meaning of existing fields or adding new features (other than +through the extension mechanism) requires a new metadata version defined in +a new PEP. + + +Appendix A: Conversion notes for legacy metadata +================================================ + +The reference implementations for converting from legacy metadata to +metadata 2.0 are: + +* the `wheel project `__, which + adds the ``bdist_wheel`` command to ``setuptools`` +* the `Warehouse project `__, which + will eventually be migrated to the Python Packaging Authority as the next + generation Python Package Index implementation +* the `distlib project `__ which is + derived from the core packaging infrastructure created for the + ``distutils2`` project and + +While it is expected that there may be some edge cases where manual +intervention is needed for clean conversion, the specification has been +designed to allow fully automated conversion of almost all projects on +PyPI. 
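As a rough illustration of the kind of conversion involved (and not the
algorithm used by any of the reference implementations listed above), the
following sketch maps a few legacy ``PKG-INFO`` fields to their closest
metadata 2.0 equivalents using only the standard library. The field handling
is deliberately simplified - for example, it ignores the extras and
environment markers that may be embedded in ``Requires-Dist`` entries::

    # Simplified, illustrative sketch of legacy metadata conversion
    import json
    from email.parser import Parser

    def convert_legacy_metadata(pkg_info_text):
        legacy = Parser().parsestr(pkg_info_text)
        pydist = {
            "metadata_version": "2.0",
            "name": legacy["Name"],
            "version": legacy["Version"],
            "summary": legacy["Summary"],
        }
        requires = legacy.get_all("Requires-Dist", [])
        if requires:
            # A real converter would split out extras and environment markers
            pydist["run_requires"] = [{"requires": requires}]
        return json.dumps(pydist, indent=2)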
+ +Metadata conversion (especially on the part of the index server) is a +necessary step to allow installation and analysis tools to start +benefiting from the new metadata format, without having to wait for +developers to upgrade to newer build systems. + + +Appendix B: Mapping dependency declarations to an RPM SPEC file +=============================================================== + +As an example of mapping this PEP to Linux distro packages, assume an +example project without any extras defined is split into 2 RPMs +in a SPEC file: ``example`` and ``example-devel``. + +The ``meta_requires`` and ``run_requires`` dependencies would be mapped +to the Requires dependencies for the "example" RPM (a mapping from +environment markers relevant to Linux to SPEC file conditions would +also allow those to be handled correctly) + +The ``build_requires`` dependencies would be mapped to the BuildRequires +dependencies for the "example" RPM. + +All defined dependencies relevant to Linux, including those in +``dev_requires`` and ``test_requires`` would become Requires dependencies +for the "example-devel" RPM. + +A documentation toolchain dependency like Sphinx would either go in +``build_requires`` (for example, if man pages were included in the +built distribution) or in ``dev_requires`` (for example, if the +documentation is published solely through ReadTheDocs or the +project website). This would be enough to allow an automated converter +to map it to an appropriate dependency in the spec file. + +If the project did define any extras, those could be mapped to additional +virtual RPMs with appropriate BuildRequires and Requires entries based on +the details of the dependency specifications. Alternatively, they could +be mapped to other system package manager features (such as package lists +in ``yum``). + +Other system package managers may have other options for dealing with +extras (Debian packagers, for example, would have the option to map them +to "Recommended" or "Suggested" package entries). + +The metadata extension format should also allow distribution specific hints +to be included in the upstream project metadata without needing to manually +duplicate any of the upstream metadata in a distribution specific format. 
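As a concrete (and deliberately simplified) sketch of the mapping described
above, a converter might turn the unconditional ``meta_requires`` and
``run_requires`` entries of a ``pydist.json`` file into ``Requires:`` lines
for the SPEC file along the following lines. The helper shown here is
hypothetical, and it skips the conditional entries (those with ``extra`` or
``environment`` set) that a real converter would translate into SPEC file
conditions or virtual subpackages::

    # Hypothetical helper: emit RPM "Requires:" lines from pydist.json
    import json

    def rpm_requires(pydist_path):
        with open(pydist_path) as f:
            meta = json.load(f)
        lines = []
        for field in ("meta_requires", "run_requires"):
            for entry in meta.get(field, []):
                if "extra" in entry or "environment" in entry:
                    continue  # conditional dependencies need extra handling
                for req in entry["requires"]:
                    # "Name (>= 1.0)" -> "Requires: python-name >= 1.0"
                    name, _, constraint = req.partition("(")
                    line = "Requires: python-" + name.strip().lower()
                    if constraint:
                        line += " " + constraint.rstrip(")").strip()
                    lines.append(line)
        return "\n".join(lines)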
+ + +Appendix C: Summary of differences from \PEP 345 +================================================= * Metadata-Version is now 2.0, with semantics specified for handling version changes @@ -1550,21 +2053,21 @@ * Changed the version scheme to be based on PEP 440 rather than PEP 386 -* Added the build label mechanism as described in PEP 440 - -* Support for different development, build, test and deployment dependencies +* Added the source label mechanism as described in PEP 440 + +* Support for different kinds of dependencies * The "Extras" optional dependency mechanism * A well-defined metadata extension mechanism -* Metabuild hook system +* Install hook system * Clarify and simplify various aspects of environment markers: * allow use of parentheses for grouping in the pseudo-grammar * consistently use underscores instead of periods in the variable names - * clarify that chained comparisons are not permitted + * allow ordered string comparisons and chained comparisons * More flexible system for defining contact points and contributors @@ -1574,9 +2077,11 @@ * Updated obsolescence mechanism -* Added "License URL" field - -* Explicit declaration of description markup format +* Identification of supporting documents in the ``dist-info`` directory: + + * Allows markup formats to be indicated through file extensions + * Standardises the common practice of taking the description from README + * Also supports inclusion of license files and changelogs * With all due respect to Charles Schulz and Peanuts, many of the examples have been updated to be more `thematically appropriate`_ for Python ;) @@ -1625,7 +2130,7 @@ subfields. The old serialisation format also wasn't amenable to easy conversion to -standard Python data structures for use in the new metabuild hook APIs, or +standard Python data structures for use in the new install hook APIs, or in future extensions to the importer APIs to allow them to provide information for inclusion in the installation database. @@ -1649,33 +2154,47 @@ See PEP 440 for the rationale behind the addition of this field. -Development, build and deployment dependencies ----------------------------------------------- - -The separation of the ``requires``, ``build_requires`` and ``dev_requires`` -fields allow a distribution to indicate whether a dependency is needed -specifically to develop, build or deploy the distribution. - -As distribution metadata improves, this should allow much greater control -over where particular dependencies end up being installed . +Support for different kinds of dependencies +------------------------------------------- + +The separation of the five different kinds of dependency allows a +distribution to indicate whether a dependency is needed specifically to +develop, build, test or use the distribution. + +To allow for metadistributions like PyObjC, while still actively +discouraging overly strict dependency specifications, the separate +``meta`` dependency fields are used to separate out those dependencies +where exact version specifications are appropriate. + +The advantage of having these distinctions supported in the upstream Python +specific metadata is that even if a project doesn't care about these +distinction themselves, they may be more amenable to patches from +downstream redistributors that separate the fields appropriately. Over time, +this should allow much greater control over where and when particular +dependencies end up being installed. 
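To illustrate the intended split (without attempting to specify exact
installer behaviour), a tool might select the dependency lists to process
for a given activity roughly as follows. The activity names and the helper
itself are purely illustrative assumptions, and conditional entries (extras
and environment markers) are ignored for brevity::

    # Illustrative mapping from an activity to the dependency fields a tool
    # might process; not a normative part of the specification.
    FIELDS_BY_ACTIVITY = {
        "install": ["meta_requires", "run_requires"],
        "build":   ["build_requires"],
        "test":    ["run_requires", "test_requires"],
        "develop": ["meta_requires", "run_requires", "build_requires",
                    "test_requires", "dev_requires"],
    }

    def requirements_for(metadata, activity):
        requirements = []
        for field in FIELDS_BY_ACTIVITY[activity]:
            for entry in metadata.get(field, []):
                if "extra" in entry or "environment" in entry:
                    continue  # conditional dependencies omitted for brevity
                requirements.extend(entry["requires"])
        return requirements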
+
+The names for the dependency fields have been deliberately chosen to avoid
+conflicting with the existing terminology in setuptools and previous
+versions of the metadata standard. Specifically, the names ``requires``,
+``install_requires`` and ``setup_requires`` are not used, which will
+hopefully reduce confusion when converting legacy metadata to the new
+standard.
 
 
 Support for optional dependencies for distributions
 ---------------------------------------------------
 
 The new extras system allows distributions to declare optional
-features, and to use the ``may_require`` and ``build_may_require`` fields
-to indicate when particular dependencies are needed only to support those
-features. It is derived from the equivalent system that is already in
-widespread use as part of ``setuptools`` and allows that aspect of the
-legacy ``setuptools`` metadata to be accurately represented in the new
-metadata format.
-
-The ``test`` extra is implicitly defined for all distributions, as it
-ties in with the new metabuild hook offering a standard way to request
-execution of a distribution's test suite. Identifying test suite
-dependencies is already one of the most popular uses of the extras system
-in ``setuptools``.
+behaviour, and to use the dependency fields to indicate when
+particular dependencies are needed only to support that behaviour. It is
+derived from the equivalent system that is already in widespread use as
+part of ``setuptools`` and allows that aspect of the legacy ``setuptools``
+metadata to be accurately represented in the new metadata format.
+
+The additions to the extras syntax relative to setuptools are defined to
+make it easier to express the various possible combinations of dependencies,
+in particular those associated with build systems (with optional support
+for running the test suite) and development systems.
 
 
 Support for metadata extensions
@@ -1691,36 +2210,70 @@
 the chosen extension, and the new extras mechanism, allowing support for
 particular extensions to be provided as optional features.
 
-
-Support for metabuild hooks
+Possible future uses for extensions include declaration of plugins for
+other distributions, hints for automatic conversion to Linux system
+packages, and inclusion of CVE references to mark security releases.
+
+
+Support for install hooks
 ---------------------------
 
-The new metabuild system is designed to allow the wheel format to fully
-replace direct installation on deployment targets, by allows projects like
-Twisted to still execute code following installation from a wheel file.
-
-Falling back to invoking ``setup.py`` directly rather than using a
-metabuild hook will remain an option when relying on version 1.x metadata,
-and is also used as the interim solution for installation from source
-archives.
-
-The ``test_installed_dist`` metabuild hook is included as a complement to
-the ability to explicitly specify test dependencies.
+The new install hook system is designed to allow the wheel format to fully
+replace direct installation on deployment targets, by allowing projects to
+explicitly define code that should be executed following installation from
+a wheel file.
+
+This may range from something relatively simple, like the `two line
+refresh `__
+of the Twisted plugin caches that the Twisted developers recommend for
+any project that provides Twisted plugins, to more complex platform
+dependent behaviour, potentially in conjunction with appropriate
+metadata extensions and ``supports_environments`` entries.
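A sketch of what such a simple postinstall hook might look like is shown
below. The module path and the parameter names used here are illustrative
assumptions rather than part of the specification (the actual parameter
names form part of the hook signature defined in the install hooks section
above), and the hook body is just the cache refresh recommended by the
Twisted developers::

    # exampleproj/_install_hooks.py (hypothetical module)
    # Regenerates the Twisted plugin cache after installation. The keyword
    # parameter names shown are assumptions made for illustration only.
    def postinstall(current_meta, previous_meta):
        from twisted.plugin import IPlugin, getPlugins
        list(getPlugins(IPlugin))

Such a hook would then be referenced from the distribution metadata through
an export specifier along the lines of
``exampleproj._install_hooks:postinstall``.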
+ +For example, upstream declaration of external dependencies for various +Linux distributions in a distribution neutral format may be supported by +defining an appropriate metadata extension that is read by a postinstall +hook and converted into an appropriate invocation of the system package +manager. Other operations (such as registering COM DLLs on Windows, +registering services for automatic startup on any platform, or altering +firewall settings) may need to be undertaken with elevated privileges, +meaning they cannot be deferred to implicit execution on first use of the +distribution. + +The install hook and metadata extension systems allow support for such +activities to be pursued independently by the individual platform +communities, while still interoperating with the cross-platform Python +tools. + +Legacy packages that expect to able to run code on target systems using +``setup.py install`` will no longer work correctly. Such packages will +already break when pip 1.4+ is configured to use a wheel cache directory. Changes to environment markers ------------------------------ -The changes to environment markers were just clarifications and +There are three substantive changes to environment markers in this version: + +* ``platform_release`` was added, as it provides more useful information + than ``platform_version`` on at least Linux and Mac OS X (specifically, + it provides details of the running kernel version) +* ordered comparison of strings is allowed, as this is more useful for + setting minimum and maximum versions where conditional dependencies + are needed or where a platform is supported +* comparison chaining is explicitly allowed, as this becomes useful in the + presence of ordered comparisons + +The other changes to environment markers are just clarifications and simplifications to make them easier to use. The arbitrariness of the choice of ``.`` and ``_`` in the different -variables was addressed by standardising on ``_`` (as these are predefined -variables rather than live references into the Python module namespace) - -The use of parentheses for grouping and the disallowance of chained -comparisons were added to address some underspecified behaviour in the -previous version of the specification. +variables was addressed by standardising on ``_`` (as these are all +predefined variables rather than live references into the Python module +namespace) + +The use of parentheses for grouping was explicitly noted to address some +underspecified behaviour in the previous version of the specification. Updated contact information @@ -1751,8 +2304,9 @@ has been used to replace several older fields with poorly defined semantics. For the moment, the old ``Requires-External`` field has been removed -entirely. Possible replacements may be explored through the metadata -extension mechanism. +entirely. The combination of explicit support for post install hooks and the +metadata extension mechanism will hopefully prove to be a more useful +replacement. Updated obsolescence mechanism @@ -1770,22 +2324,55 @@ is not widely supported, and so removing it does not present any significant barrier to tools and projects adopting the new metadata format. -Explicit markup for description -------------------------------- - -Currently, PyPI attempts to detect the markup format by rendering it as -reStructuredText, and if that fails, treating it as plain text. 
Allowing -the intended format to be stated explicitly will allow this guessing to be -removed, and more informative error reports to be provided to users when -a rendering error occurs. - -This is especially necessary since PyPI applies additional restrictions to + +Included text documents +----------------------- + +Currently, PyPI attempts to determine the description's markup format by +rendering it as reStructuredText, and if that fails, treating it as plain +text. + +Furthermore, many projects simply read their long description in from an +existing README file in ``setup.py``. The popularity of this practice is +only expected to increase, as many online version control systems +(including both GitHub and BitBucket) automatically display such files +on the landing page for the project. + +Standardising on the inclusion of the long description as a separate +file in the ``dist-info`` directory allows this to be simplified: + +* An existing file can just be copied into the ``dist-info`` directory as + part of creating the sdist +* The expected markup format can be determined by inspecting the file + extension of the specified path + +Allowing the intended format to be stated explicitly in the path allows +the format guessing to be removed and more informative error reports to be +provided to users when a rendering error occurs. + +This is especially helpful since PyPI applies additional restrictions to the rendering process for security reasons, thus a description that renders correctly on a developer's system may still fail to render on the server. - -Deferred features -================= +The document naming system used to achieve this then makes it relatively +straightforward to allow declaration of alternative markup formats like +HTML, Markdown and AsciiDoc through the use of appropriate file +extensions, as well as to define similar included documents for the +project's license and changelog. + +Grouping the included document names into a single top level field gives +automated tools the option of treating them as arbitrary documents without +worrying about their contents. + +Requiring that the included documents be added to the ``dist-info`` metadata +directory means that the complete metadata for the distribution can be +extracted from an sdist or binary archive simply by extracting that +directory, without needing to check for references to other files in the +sdist. + + +Appendix D: Deferred features +============================= Several potentially useful features have been deliberately deferred in order to better prioritise our efforts in migrating to the new metadata @@ -1793,15 +2380,25 @@ new metadata, but which can be readily added in metadata 2.1 without breaking any use cases already supported by metadata 2.0. -Once the ``pypi``, ``setuptools``, ``pip`` and ``distlib`` projects -support creation and consumption of metadata 2.0, then we may revisit -the creation of metadata 2.1 with these additional features. - -.. note:: - - Given the nature of this PEP as an interoperability specification, - this section will probably be removed before the PEP is accepted. - However, it's useful to have it here while discussion is ongoing. +Once the ``pypi``, ``setuptools``, ``pip``, ``wheel`` and ``distlib`` +projects support creation and consumption of metadata 2.0, then we may +revisit the creation of metadata 2.1 with some or all of these additional +features. 
+ + +MIME type registration +---------------------- + +At some point after acceptance of the PEP, I will likely submit the +following MIME type registration requests to IANA: + +* Full metadata: ``application/vnd.python.pydist+json`` +* Essential dependency resolution metadata: + ``application/vnd.python.pydist-dependencies+json`` + +It's even possible we may be able to just register the ``vnd.python`` +namespace under the banner of the PSF rather than having to register +the individual subformats. String methods in environment markers @@ -1816,62 +2413,82 @@ than a little strange. -Module listing --------------- - -A top level ``"module"`` key, referencing a list of strings, with each -giving the fully qualified name of a public package or module provided -by the distribution. - -A flat list would be used in order to correctly accommodate namespace -packages (where a distribution may provide subpackages or submodules without -explicitly providing the parent namespace package). - -Example:: - - "modules": [ - "comfy.chair" - ] +Module and file listings +------------------------ + +Derived metadata giving the modules and files included in built +distributions may be useful at some point in the future. (At least RPM +provides this, and I believe the APT equivalent does as well) Explicitly providing a list of public module names will likely help with enabling features in RPM like "Requires: python(requests)", as well as providing richer static metadata for analysis from PyPI. -However, this is just extra info that doesn't impact installing from wheels, -so it is a good candidate for postponing to metadata 2.1. - - -Additional metabuild hooks --------------------------- - -The following draft metabuild operations have been deferred for now: +However, this is just extra info that doesn't impact reliably installing +from wheels, so it is a good candidate for postponing to metadata 2.1 +(at the earliest). + + +Additional install hooks +------------------------ + +In addition to the postinstall and preuninstall hooks described in the PEP, +other distribution systems (like RPM) include the notion of preinstall +and postuninstall hooks. These hooks would run with the runtime dependencies +installed, but without the distribution itself. These have been deliberately +omitted, as they're well suited to being explored further as metadata +extensions. + +Similarly, the idea of "optional" postinstall and preuninstall hooks can +be pursued as a metadata extension. + +By contrast, the mandatory postinstall and preuninstall hooks have been +included directly in the PEP, specifically to ensure installation tools +don't silently ignore them. This ensures users will either be able to +install such distributions, or else receive an explicit error at installation +time. + + +Metabuild system +---------------- + +This version of the metadata specification continues to use ``setup.py`` +and the distutils command syntax to invoke build and test related +operations on a source archive or VCS checkout. 
+ +It may be desirable to replace these in the future with tool independent +entry points that support: * Generating the metadata file on a development system -* Generating a source archive on a development system +* Generating an sdist on a development system * Generating a binary archive on a build system +* Running the test suite on a built (but not installed) distribution Metadata 2.0 deliberately focuses on wheel based installation, leaving -tarball and sdist based installation to use the existing ``setup.py`` -based ``distutils`` command interface. - -In the meantime, the above four operations will continue to be handled -through the ``distutils``/``setuptools`` command system: +sdist, source archive, and VCS checkout based installation to use the +existing ``setup.py`` based ``distutils`` command interface. + +In the meantime, the above operations will be handled through the +``distutils``/``setuptools`` command system: * ``python setup.py dist_info`` * ``python setup.py sdist`` +* ``python setup.py build_ext --inplace`` +* ``python setup.py test`` * ``python setup.py bdist_wheel`` -The following additional metabuild hooks may be added in metadata 2.1 to +The following metabuild hooks may be defined in metadata 2.1 to cover these operations without relying on ``setup.py``: -* ``make_dist_info``: generate the source archive's dist_info directory -* ``make_sdist``: construct a source archive -* ``build_wheel``: construct a binary wheel archive from an sdist source - archive - -Tentative signatures have been designed for those hooks, but they will -not be pursued further until 2.1 (note that the current signatures for -the hooks do *not* adequately handle the "extras" concept):: +* ``make_dist_info``: generate the sdist's dist_info directory +* ``make_sdist``: create the contents of an sdist +* ``build_dist``: create the contents of a binary wheel archive from an + unpacked sdist +* ``test_built_dist``: run the test suite for a built distribution + +Tentative signatures have been designed for those hooks, but in order to +better focus initial development efforts on the integration and installation +use cases, they will not be pursued further until metadata 2.1:: def make_dist_info(source_dir, info_dir): """Generate the contents of dist_info for an sdist archive @@ -1896,11 +2513,11 @@ Returns the distribution metadata as a dictionary. 
""" - def build_wheel(sdist_dir, contents_dir, info_dir, compatibility=None): - """Generate the contents of a wheel archive - - *source_dir* points to an unpacked source archive - *contents_dir* is the destination where the wheel contents should be + def build_dist(sdist_dir, built_dir, info_dir, compatibility=None): + """Generate the contents of a binary wheel archive + + *sdist_dir* points to an unpacked sdist + *built_dir* is the destination where the wheel contents should be written (note that archiving the contents is the responsibility of the metabuild tool rather than the hook function) *info_dir* is the destination where the wheel metadata files should @@ -1912,31 +2529,102 @@ Returns the actual compatibility tag for the build """ - -Rejected Features -================= + def test_built_dist(sdist_dir, built_dir, info_dir): + """Check a built (but not installed) distribution works as expected + + *sdist_dir* points to an unpacked sdist + *built_dir* points to a platform appropriate unpacked wheel archive + (which may be missing the wheel metadata directory) + *info_dir* points to the appropriate wheel metadata directory + + Requires that the distribution's test dependencies be installed + (indicated by the ``:test:`` extra). + + Returns ``True`` if the check passes, ``False`` otherwise. + """ + +As with the existing install hooks, checking for extras would be done +using the same import based checks as are used for runtime extras. That +way it doesn't matter if the additional dependencies were requested +explicitly or just happen to be available on the system. + +There are still a number of open questions with this design, such as whether +a single build hook is sufficient to cover both "build for testing" and +"prep for deployment", as well as various complexities like support for +cross-compilation of binaries, specification of target platforms and +Python versions when creating wheel files, etc. + +Opting to retain the status quo for now allows us to make progress on +improved metadata publication and binary installation support, rather than +having to delay that awaiting the creation of a viable metabuild framework. + + +Appendix E: Rejected features +============================= The following features have been explicitly considered and rejected as introducing too much additional complexity for too small a gain in expressiveness. -.. note:: - - Given the nature of this PEP as an interoperability specification, - this section will probably be removed before the PEP is accepted. - However, it's useful to have it here while discussion is ongoing. - - -Detached metadata ------------------ - -Rather than allowing some large items (such as the description field) to -be distributed separately, this PEP instead defines two metadata subsets -that should support more reasonable caching and API designs (for example, -only the essential dependency resolution metadata would be distributed -through TUF, and it is entirely possible the updated sdist, wheel and -installation database specs will use the abbreviated metadata, leaving -the full metadata as the province of index servers). + +Separate lists for conditional and unconditional dependencies +------------------------------------------------------------- + +Earlier versions of this PEP used separate lists for conditional and +unconditional dependencies. 
This turned out to be annoying to handle in +automated tools and removing it also made the PEP and metadata schema +substantially shorter, suggesting it was actually harder to explain as well. + + +Disallowing underscores in distribution names +--------------------------------------------- + +Debian doesn't actually permit underscores in names, but that seems +unduly restrictive for this spec given the common practice of using +valid Python identifiers as Python distribution names. A Debian side +policy of converting underscores to hyphens seems easy enough to +implement (and the requirement to consider hyphens and underscores as +equivalent ensures that doing so won't introduce any conflicts). + + +Allowing the use of Unicode in distribution names +------------------------------------------------- + +This PEP deliberately avoids following Python 3 down the path of arbitrary +Unicode identifiers, as the security implications of doing so are +substantially worse in the software distribution use case (it opens +up far more interesting attack vectors than mere code obfuscation). + +In addition, the existing tools really only work properly if you restrict +names to ASCII and changing that would require a *lot* of work for all +the automated tools in the chain. + +It may be reasonable to revisit this question at some point in the (distant) +future, but setting up a more reliable software distribution system is +challenging enough without adding more general Unicode identifier support +into the mix. + + +Single list for conditional and unconditional dependencies +---------------------------------------------------------- + +It's technically possible to store the conditional and unconditional +dependencies of each kind in a single list and switch the handling based on +the entry type (string or mapping). + +However, the current ``*requires`` vs ``*may-require`` two list design seems +easier to understand and work with, since it's only the conditional +dependencies that need to be checked against the requested extras list and +the target installation environment. + + +Depending on source labels +-------------------------- + +There is no mechanism to express a dependency on a source label - they +are included in the metadata for internal project reference only. Instead, +dependencies must be expressed in terms of either public versions or else +direct URL references. Alternative dependencies @@ -1961,7 +2649,7 @@ database driver" metadata extension where a project depends on SQL Alchemy, and then declares in the extension which database drivers are checked for compatibility by the upstream project (similar to the advisory -``supports-platform`` field in the main metadata). +``supports_environments`` field in the main metadata). We're also getting better support for "virtual provides" in this version of the metadata standard, so this may end up being an installer and index @@ -1989,9 +2677,67 @@ Under the revised metadata design, conditional "provides" based on runtime features or the environment would go in a separate "may_provide" field. -However, I'm not convinced there's a great use case for that, so the idea +However, it isn't clear there's any use case for doing that, so the idea is rejected unless someone can present a compelling use case (and even then -the idea wouldn't be reconsidered until metadata 2.1 at the earliest). +the idea won't be reconsidered until metadata 2.1 at the earliest). 
+ + +A hook to run tests against installed distributions +--------------------------------------------------- + +Earlier drafts of this PEP defined a hook for running automated +tests against an *installed* distribution. This isn't actually what you +generally want - you want the ability to test a *built* distribution, +potentially relying on files which won't be included in the binary archives. + +RPM's "check" step also runs between the build step and the install step, +rather than after the install step. + +Accordingly, the ``test_installed_dist`` hook has been removed, and the +``test_built_dist`` metabuild hook has been tentatively defined. However, +along with the rest of the metabuild hooks, further consideration has been +deferred until metadata 2.1 at the earliest. + + +Extensible signatures for the install hooks +------------------------------------------- + +The install hooks have been deliberately designed to NOT accept arbitary +keyword arguments that the hook implementation is then expected to ignore. + +The argument in favour of that API design technique is to allow the addition +of new optional arguments in the future, without requiring the definition +of a new install hook, or migration to version 3.0 of the metadata +specification. It is a technique very commonly seen in function wrappers +which merely pass arguments along to the inner function rather than +processing them directly. + +However, the install hooks are already designed to have access to the full +metadata for the distribution (including all metadata extensions and +the previous/next version when appropriate), as well as to the full target +deployment environment. + +This means there are two candidates for additional information that +could be passed as arbitrary keyword arguments: + +* installer dependent settings +* user provided installation options + +The first of those runs explicitly counter to one of the core goals of the +metadata 2.0 specification: decoupling the software developer's choice of +development and publication tools from the software integrator's choice of +integration and deployment tools. + +The second is a complex problem that has a readily available workaround in +the form of operating system level environment variables (this is also +one way to interoperate with platform specific installation tools). + +Alternatively, installer developers may either implicitly inject an +additional metadata extension when invoking the install hook, or else +define an alternate hook signature as a distinct metadata extension to be +provided by the distribution. Either of these approaches makes the +reliance on installer-dependent behaviour suitably explicit in either +the install hook implementation or the distribution metadata. 
References diff --git a/pep-0426/pydist-schema.json b/pep-0426/pydist-schema.json new file mode 100644 --- /dev/null +++ b/pep-0426/pydist-schema.json @@ -0,0 +1,329 @@ +{ + "id": "http://www.python.org/dev/peps/pep-0426/", + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Metadata for Python Software Packages 2.0", + "type": "object", + "properties": { + "metadata_version": { + "description": "Version of the file format", + "type": "string", + "pattern": "^(\\d+(\\.\\d+)*)$" + }, + "generator": { + "description": "Name and version of the program that produced this file.", + "type": "string", + "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])( \\(.*\\))?$" + }, + "name": { + "description": "The name of the distribution.", + "type": "string", + "$ref": "#/definitions/distribution_name" + }, + "version": { + "description": "The distribution's public version identifier", + "type": "string", + "pattern": "^(\\d+(\\.\\d+)*)((a|b|c|rc)(\\d+))?(\\.(post)(\\d+))?(\\.(dev)(\\d+))?$" + }, + "source_label": { + "description": "A constrained identifying text string", + "type": "string", + "pattern": "^[0-9a-z_.-+]+$" + }, + "source_url": { + "description": "A string containing a full URL where the source for this specific version of the distribution can be downloaded.", + "type": "string", + "format": "uri" + }, + "summary": { + "description": "A one-line summary of what the distribution does.", + "type": "string" + }, + "document_names": { + "description": "Names of supporting metadata documents", + "type": "object", + "properties": { + "description": { + "type": "string", + "$ref": "#/definitions/document_name" + }, + "changelog": { + "type": "string", + "$ref": "#/definitions/document_name" + }, + "license": { + "type": "string", + "$ref": "#/definitions/document_name" + } + }, + "additionalProperties": false + }, + "keywords": { + "description": "A list of additional keywords to be used to assist searching for the distribution in a larger catalog.", + "type": "array", + "items": { + "type": "string" + } + }, + "license": { + "description": "A string indicating the license covering the distribution.", + "type": "string" + }, + "classifiers": { + "description": "A list of strings, with each giving a single classification value for the distribution.", + "type": "array", + "items": { + "type": "string" + } + }, + "contacts": { + "description": "A list of contributor entries giving the recommended contact points for getting more information about the project.", + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/contact" + } + }, + "contributors": { + "description": "A list of contributor entries for other contributors not already listed as current project points of contact.", + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/contact" + } + }, + "project_urls": { + "description": "A mapping of arbitrary text labels to additional URLs relevant to the project.", + "type": "object" + }, + "extras": { + "description": "A list of optional sets of dependencies that may be used to define conditional dependencies in \"may_require\" and similar fields.", + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/extra_name" + } + }, + "meta_requires": { + "description": "A list of subdistributions made available through this metadistribution.", + "type": "array", + "$ref": "#/definitions/dependencies" + }, + "run_requires": { + "description": "A list of other distributions needed to run this distribution.", + "type": 
"array", + "$ref": "#/definitions/dependencies" + }, + "test_requires": { + "description": "A list of other distributions needed when this distribution is tested.", + "type": "array", + "$ref": "#/definitions/dependencies" + }, + "build_requires": { + "description": "A list of other distributions needed when this distribution is built.", + "type": "array", + "$ref": "#/definitions/dependencies" + }, + "dev_requires": { + "description": "A list of other distributions needed when this distribution is developed.", + "type": "array", + "$ref": "#/definitions/dependencies" + }, + "provides": { + "description": "A list of strings naming additional dependency requirements that are satisfied by installing this distribution. These strings must be of the form Name or Name (Version)", + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/provides_declaration" + } + }, + "modules": { + "description": "A list of modules and/or packages available for import after installing this distribution.", + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/qualified_name" + } + }, + "namespaces": { + "description": "A list of namespace packages this distribution contributes to", + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/qualified_name" + } + }, + "commands": { + "description": "Command line interfaces provided by this distribution", + "type": "object", + "$ref": "#/definitions/commands" + }, + "exports": { + "description": "Other exported interfaces provided by this distribution", + "type": "object", + "$ref": "#/definitions/exports" + }, + "obsoleted_by": { + "description": "A string that indicates that this project is no longer being developed. The named project provides a substitute or replacement.", + "type": "string", + "$ref": "#/definitions/requirement" + }, + "supports_environments": { + "description": "A list of strings specifying the environments that the distribution explicitly supports.", + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/environment_marker" + } + }, + "install_hooks": { + "description": "The install_hooks field is used to define various operations that may be invoked on a distribution in a platform independent manner.", + "type": "object", + "properties": { + "postinstall": { + "type": "string", + "$ref": "#/definitions/export_specifier" + }, + "preuninstall": { + "type": "string", + "$ref": "#/definitions/export_specifier" + } + } + }, + "extensions": { + "description": "Extensions to the metadata may be present in a mapping under the 'extensions' key.", + "type": "object" + } + }, + + "required": ["metadata_version", "name", "version", "summary"], + "additionalProperties": false, + + "definitions": { + "contact": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "email": { + "type": "string" + }, + "url": { + "type": "string" + }, + "role": { + "type": "string" + } + }, + "required": ["name"], + "additionalProperties": false + }, + "dependencies": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/dependency" + } + }, + "dependency": { + "type": "object", + "properties": { + "extra": { + "type": "string", + "$ref": "#/definitions/valid_name" + }, + "environment": { + "type": "string", + "$ref": "#/definitions/environment_marker" + }, + "requires": { + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/requirement" + } + } + }, + "required": ["requires"], + "additionalProperties": false + }, + 
"commands": { + "type": "object", + "properties": { + "wrap_console": { + "type": "object", + "$ref": "#/definitions/command_map" + }, + "wrap_gui": { + "type": "object", + "$ref": "#/definitions/command_map" + }, + "prebuilt": { + "type": "array", + "items": { + "type": "string", + "$ref": "#/definitions/relative_path" + } + } + }, + "additionalProperties": false + }, + "exports": { + "type": "object", + "patternProperties": { + "^[A-Za-z]([0-9A-Za-z_])*([.][A-Za-z]([0-9A-Za-z_])*)*$": { + "type": "object", + "patternProperties": { + ".": { + "type": "string", + "$ref": "#/definitions/export_specifier" + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "command_map": { + "type": "object", + "patternProperties": { + "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": { + "type": "string", + "$ref": "#/definitions/export_specifier" + } + }, + "additionalProperties": false + }, + "distribution_name": { + "type": "string", + "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" + }, + "requirement": { + "type": "string" + }, + "provides_declaration": { + "type": "string" + }, + "environment_marker": { + "type": "string" + }, + "document_name": { + "type": "string" + }, + "extra_name" : { + "type": "string", + "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" + }, + "relative_path" : { + "type": "string" + }, + "export_specifier": { + "type": "string", + "pattern": "^([A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)(:[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)?(\\[[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?\\])?$" + }, + "qualified_name" : { + "type": "string", + "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*$" + } + } +} diff --git a/pep-0432.txt b/pep-0432.txt --- a/pep-0432.txt +++ b/pep-0432.txt @@ -3,11 +3,11 @@ Version: $Revision$ Last-Modified: $Date$ Author: Nick Coghlan -Status: Draft +Status: Deferred Type: Standards Track Content-Type: text/x-rst Created: 28-Dec-2012 -Python-Version: 3.4 +Python-Version: 3.5 Post-History: 28-Dec-2012, 2-Jan-2013 @@ -25,6 +25,31 @@ implementation is developed. +PEP Deferral +============ + +Python 3.4 is nearing its first alpha, and already includes a couple of +significant low level changes in PEP 445 (memory allocator customisation) +and PEP 442 (safe object finalization). As a result of the latter PEP, +the shutdown procedure of CPython has also been changed to be more heavily +reliant on the cyclic garbage collector, significantly reducing the +number of modules that will experience the "module globals set to None" +behaviour that is used to deliberate break cycles and attempt to releases +more external resources cleanly. + +Furthermore, I am heavily involved in the current round of updates to the +Python packaging ecosystem (as both the lead author of PEP 426 and +BDFL-delegate for several other PEPs), leaving little to spare to work on +this proposal. The other developers I would trust to lead this effort are +also working on other things. + +So, due to those practical resource constraints, the proximity of Python +3.4 deadlines, and recognition that making too many significant changes to +the low level CPython infrastructure in one release is likely to be unwise, +further work on this PEP has been deferred to the Python 3.5 development +cycle. 
+
+
 Proposal
 ========
 
diff --git a/pep-0435.txt b/pep-0435.txt
--- a/pep-0435.txt
+++ b/pep-0435.txt
@@ -5,7 +5,7 @@
 Author: Barry Warsaw , Eli Bendersky , Ethan Furman
-Status: Accepted
+Status: Final
 Type: Standards Track
 Content-Type: text/x-rst
 Created: 2013-02-23
@@ -467,6 +467,10 @@
 ...     cat = 3
 ...     dog = 4
 
+The reason for defaulting to ``1`` as the starting number and not ``0`` is
+that ``0`` is ``False`` in a boolean sense, but enum members all evaluate
+to ``True``.
+
 
 Proposed variations
 ===================
diff --git a/pep-0439.txt b/pep-0439.txt
--- a/pep-0439.txt
+++ b/pep-0439.txt
@@ -45,6 +45,12 @@
 considerably reduced. It is hoped that this will therefore increase
 the likelihood that Python projects will reuse third party software.
 
+The Python community also has an issue of complexity around the current
+bootstrap procedure for pip and setuptools. They all have
+their own bootstrap download file with slightly different usages and
+even refer to each other in some cases. Having a single bootstrap which
+is common amongst them all, with a simple usage, would be far preferable.
+
 It is also hoped that this is reduces the number of proposals to
 include more and more software in the Python standard library, and
 therefore that more popular Python software is more easily upgradeable
@@ -54,23 +60,32 @@
 Proposal
 ========
 
-This proposal affects three components of packaging: `the pip bootstrap`_,
-`setuptools`_ and, thanks to easier package installation, `modifications to
-publishing packages`_.
+The bootstrap will install the pip implementation and setuptools by
+downloading their installation files from PyPI.
+
+This proposal affects two components of packaging: `the pip bootstrap`_ and,
+thanks to easier package installation, `modifications to publishing
+packages`_.
+
+The core of this proposal is that the user experience of using pip should not
+require the user to install pip.
 
 The pip bootstrap
 -----------------
 
 The Python installation includes an executable called "pip3" (see PEP 394 for
-naming rationale etc.) that attempts to import pip machinery. If it can
-then the pip command proceeds as normal. If it cannot it will bootstrap pip by
-downloading the pip implementation wheel file. Once installed, the pip command
-proceeds as normal.
+naming rationale etc.) that attempts to import pip machinery. If it can then
+the pip command proceeds as normal. If it cannot it will bootstrap pip by
+downloading the pip implementation and setuptools wheel files. Hereafter the
+installation of the "pip implementation" will imply installation of setuptools
+and virtualenv. Once installed, the pip command proceeds as normal. Once the
+bootstrap process is complete the "pip3" command is no longer the bootstrap
+but rather the full pip command.
 
-A boostrap is used in the place of a the full pip code so that we
-don't have to bundle pip and also the install tool is upgradeable
-outside of the regular Python upgrade timeframe and processes.
+A bootstrap is used in place of the full pip code so that we don't have
+to bundle pip, and also so that pip is upgradeable outside of the regular
+Python upgrade timeframe and processes.
 
 To avoid issues with sudo we will have the bootstrap default to
 installing the pip implementation to the per-user site-packages
@@ -88,82 +103,58 @@
 2. The user will invoke a pip command, typically "pip3 install ",
    for example "pip3 install Django".
 3. The boostrap script will attempt to import the pip implementation.
-   If this succeeds, the pip command is processed normally.
+   If this succeeds, the pip command is processed normally. Stop.
 4. On failing to import the pip implementation the bootstrap notifies
-   the user that it is "upgrading pip" and contacts PyPI to obtain the
-   latest download wheel file (see PEP 427.)
-5. Upon downloading the file it is installed using the distlib
-   installation machinery for wheel packages. Upon completing the
-   installation the user is notified that "pip3 has been upgraded."
-   TODO how is it verified?
-6. The pip tool may now import the pip implementation and continues to
+   the user that it needs to "install pip". It will ask the user whether it
+   should install pip into the system-wide site-packages or as a user-only
+   package. This choice will also be present as a command-line option to pip
+   so non-interactive use is possible.
+5. The bootstrap will then contact PyPI to obtain the latest download wheel
+   file (see PEP 427.)
+6. Upon downloading the file it is installed using "python setup.py install".
+7. The pip tool may now import the pip implementation and continues to
    process the requested user command normally.
 
 Users may be running in an environment which cannot access the public
 Internet and are relying solely on a local package repository. They
 would use the "-i" (Base URL of Python Package Index) argument to the
-"pip3 install" command. This use case will be handled by:
+"pip3 install" command. This simply overrides the default index URL pointing
+to PyPI.
 
-1. Recognising the command-line arguments that specify alternative or
-   additional locations to discover packages and attempting to
-   download the package from those locations.
-2. If the package is not found there then we attempt to donwload it
-   using the standard "https://pypi.python.org/pypi/simple/pip" index.
-3. If that also fails, for any reason, we indicate to the user the
-   operation we were attempting, the reason for failure (if we know
-   it) and display further instructions for downloading and installing
-   the file manually.
+Some users may have no Internet access suitable for fetching the pip
+implementation file. These users can manually download and install the
+setuptools and pip tar files. Adding specific support for this use-case is
+unnecessary.
 
-Manual installation of the pip implementation will be supported
-through the manual download of the wheel file and "pip3 install
-".
-
-This installation will not perform standard pip installation steps of
-saving the file to a cache directory or updating any local database of
-installed files.
-
-The download of the pip implementation install file should be performed
-securely. The transport from pypi.python.org will be done over HTTPS but the CA
-certificate check will most likely not be performed, and therefore the download
-would still be vulnerable to active MITM attacks. To mitigate this
-risk we will use the embedded signature support in the wheel format to validate
-the downloaded file.
+The download of the pip implementation install file will be performed
+securely. The transport from pypi.python.org will be done over HTTPS with the
+CA certificate check performed. This facility will be present in Python 3.4+
+using Operating System certificates (see PEP XXXX).
 
 Beyond those arguments controlling index location and download options, the
 "pip3" boostrap command may support further standard pip options for
 verbosity, quietness and logging.
 
+The "pip3" command will support two new command-line options that are used
+in the bootstrapping, and otherwise ignored.
They control where the pip +implementation is installed: + +--bootstrap + Install to the user's packages directory. The name of this option is chosen + to promote it as the preferred installation option. + +--bootstrap-to-system + Install to the system site-packages directory. + +These command-line options will also need to be implemented, but otherwise +ignored, in the pip implementation. + +Consideration should be given to defaulting pip to install packages to the +user's packages directory if pip is installed in that location. + The "--no-install" option to the "pip3" command will not affect the bootstrapping process. -setuptools ----------- - -The deprecation of requiring setuptools for installation is an existing goal of -the packaging comminity (TODO ref needed). Currently pip depends upon setuptools -functionality, and it is installed by the current pip boostrap. This PEP does -not propose installing setuptools during the new bootstrap. - -It is intended that before Python 3.4 is shipped the functionlity required by -pip will be present in Python's standard library as the distlib module, and that -pip would be modified to use that functionality when present. TODO PEP reference -for distlib - -Many existing "setup.py" files require setuptools to be installed (because one -of the first things they do is import setuptools). It is intended that pip's -behaviour will be either: - -1. If setuptools is not present it can only install from wheel files and - sdists with 2.0+ metadata, or -2. If setuptools is present it can also install from sdists with legacy - metadata and eggs - -By default, installing setuptools when necessary should be automatic so that -users are not inconvenienced, but advanced users should be able to ask that it -instead be treated as an error if no wheel is available to satisfy an -installation request or dependency (so they don't inadvertently install -setuptools on their production systems if they don't want to). - Modifications to publishing packages ------------------------------------ @@ -189,22 +180,36 @@ ============== The changes to pip required by this PEP are being tracked in that project's -issue tracker [2]_ +issue tracker [2]_. Most notably, the addition of --bootstrap and --bootstrap- +to-system to the pip command-line. + +It would be preferable that the pip and setuptools projects distribute a wheel +format download. + +The required code for this implementation is the "pip3" command described +above. The additional pypublish can be developed outside of the scope of this +PEP's work. + +Finally, it would be desirable that "pip3" be ported to Python 2.6+ to allow +the single command to replace existing pip, setuptools and virtualenv (which +would be added to the bootstrap) bootstrap scripts. Having that bootstrap +included in a future Python 2.7 release would also be highly desirable. Risks ===== -The Fedora variant of Linux has had a separate program called "pip" (a -Perl package installer) available for install for some time. The -current Python "pip" program is installed as "pip-python". It is -hoped that the Fedora community will resolve this issue by renaming -the Perl installer. - The key that is used to sign the pip implementation download might be compromised and this PEP currently proposes no mechanism for key revocation. +There is a Perl package installer also named "pip". It is quite rare and not +commonly used. The Fedora variant of Linux has historically named Python's +"pip" as "python-pip" and Perl's "pip" as "perl-pip". 
This policy has been +altered [3] so that future and upgraded Fedora installations will use the name +"pip" for Python's "pip". Existing (non-upgraded) installations will still +have the old name for the Python "pip", though the potential for confusion is +now much reduced. References @@ -216,6 +221,9 @@ .. [2] pip issue tracking work needed for this PEP https://github.com/pypa/pip/issues/863 +.. [3] Fedora's python-pip package does not provide /usr/bin/pip + https://bugzilla.redhat.com/show_bug.cgi?id=958377 + Acknowledgments =============== @@ -223,7 +231,9 @@ Nick Coghlan for his thoughts on the proposal and dealing with the Red Hat issue. -Jannis Leidel and Carl Meyer for their thoughts. +Jannis Leidel and Carl Meyer for their thoughts. Marcus Smith for feedback. + +Marcela Mašláňová for resolving the Fedora issue. Copyright diff --git a/pep-0440.txt b/pep-0440.txt --- a/pep-0440.txt +++ b/pep-0440.txt @@ -9,7 +9,7 @@ Type: Standards Track Content-Type: text/x-rst Created: 18 Mar 2013 -Post-History: 30 Mar 2013, 27-May-2013 +Post-History: 30 Mar 2013, 27 May 2013, 20 Jun 2013 Replaces: 386 @@ -27,7 +27,7 @@ This PEP was broken out of the metadata 2.0 specification in PEP 426. Unlike PEP 426, the notes that remain in this document are intended as - part of the final specification. + part of the final specification (except for this one). Definitions @@ -40,7 +40,7 @@ The following terms are to be interpreted as described in PEP 426: * "Distributions" -* "Versions" +* "Releases" * "Build tools" * "Index servers" * "Publication tools" @@ -52,9 +52,13 @@ Version scheme ============== -Distribution versions are identified by both a public version identifier, -which supports all defined version comparison operations, and a build -label, which supports only strict equality comparisons. +Distributions are identified by a public version identifier which +supports all defined version comparison operations. + +Distributions may also define a source label, which is not used by +automated tools. Source labels are useful when a project internal +versioning scheme requires translation to create a compliant public +version identifier. The version scheme is used both to describe the distribution version provided by a particular distribution archive, as well as to place @@ -84,7 +88,7 @@ * Post-release segment: ``.postN`` * Development release segment: ``.devN`` -Any given version will be a "release", "pre-release", "post-release" or +Any given release will be a "final release", "pre-release", "post-release" or "developmental release" as defined in the following sections. .. note:: @@ -99,34 +103,43 @@ sections. -Build labels ------------- +Source labels +------------- -Build labels are text strings with minimal defined semantics. +Source labels are text strings with minimal defined semantics. -To ensure build labels can be readily incorporated as part of file names -and URLs, they MUST be comprised of only ASCII alphanumerics, plus signs, -periods and hyphens. +To ensure source labels can be readily incorporated as part of file names +and URLs, and to avoid formatting inconsistencies in hexadecimal hash +representations they MUST be limited to the following set of permitted +characters: -In addition, build labels MUST be unique within a given distribution. +* Lowercase ASCII letters (``[a-z]``) +* ASCII digits (``[0-9]``) +* underscores (``_``) +* hyphens (``-``) +* periods (``.``) +* plus signs (``+``) -As with distribution names, all comparisons of build labels MUST be case -insensitive.
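A tiny sketch of how a tool might enforce the permitted-character rule listed above, together with the start/end and uniqueness constraints that the new text goes on to state just below; the regular expression is illustrative, not normative::

    import re

    # lowercase letters, digits, "_", "-", "." and "+", starting and ending
    # with a letter or digit
    SOURCE_LABEL = re.compile(r"^[a-z0-9]([a-z0-9_.+-]*[a-z0-9])?$")

    def is_valid_source_label(label, known_versions=()):
        return bool(SOURCE_LABEL.match(label)) and label not in known_versions

    assert is_valid_source_label("1.7.2-33-gb3ddc6f")     # DVCS derived label
    assert not is_valid_source_label("Feature_Branch")    # uppercase rejected
    assert not is_valid_source_label("-abc")              # bad leading character
    assert not is_valid_source_label("1.0", known_versions={"1.0"})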
+Source labels MUST start and end with an ASCII letter or digit. +Source labels MUST be unique within each project and MUST NOT match any +defined version for the project. -Releases --------- -A version identifier that consists solely of a release segment is termed -a "release". +Final releases +-------------- -The release segment consists of one or more non-negative integer values, -separated by dots:: +A version identifier that consists solely of a release segment is +termed a "final release". + +The release segment consists of one or more non-negative integer +values, separated by dots:: N[.N]+ -Releases within a project will typically be numbered in a consistently -increasing fashion. +Final releases within a project MUST be numbered in a consistently +increasing fashion, otherwise automated tools will not be able to upgrade +them correctly. Comparison and ordering of release segments considers the numeric value of each component of the release segment in turn. When comparing release @@ -157,8 +170,8 @@ 2.0 2.0.1 -A release series is any set of release numbers that start with a common -prefix. For example, ``3.3.1``, ``3.3.5`` and ``3.3.9.45`` are all +A release series is any set of final release numbers that start with a +common prefix. For example, ``3.3.1``, ``3.3.5`` and ``3.3.9.45`` are all part of the ``3.3`` release series. .. note:: @@ -206,8 +219,8 @@ Post-releases ------------- -Some projects use post-releases to address minor errors in a release that -do not affect the distributed software (for example, correcting an error +Some projects use post-releases to address minor errors in a final release +that do not affect the distributed software (for example, correcting an error in the release notes). If used as part of a project's development cycle, these post-releases are @@ -371,7 +384,7 @@ .devN, aN, bN, cN, rcN, , .postN Note that `rc` will always sort after `c` (regardless of the numeric -component) although they are semantically equivalent. Tools are free to +component) although they are semantically equivalent. Tools MAY reject this case as ambiguous and remain in compliance with the PEP. Within an alpha (``1.0a1``), beta (``1.0b1``), or release candidate @@ -444,7 +457,7 @@ Some projects may choose to use a version scheme which requires translation in order to comply with the public version scheme defined in -this PEP. In such cases, the build label can be used to +this PEP. In such cases, the source label can be used to record the project specific version as an arbitrary label, while the translated public version is published in the version field. @@ -488,7 +501,7 @@ permitted in the public version field. As with semantic versioning, the public ``.devN`` suffix may be used to -uniquely identify such releases for publication, while the build label is +uniquely identify such releases for publication, while the source label is used to record the original DVCS based version label. @@ -496,7 +509,7 @@ ~~~~~~~~~~~~~~~~~~~ As with other incompatible version schemes, date based versions can be -stored in the build label field. Translating them to a compliant +stored in the source label field. Translating them to a compliant public version is straightforward: use a leading ``"0."`` prefix in the public version label, with the date based version number as the remaining components in the release segment. @@ -506,6 +519,22 @@ version comparison semantics. 
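To make the release segment ordering described above concrete, a short sketch that compares bare release segments numerically, padding the shorter one with zeros; pre-, post- and developmental release segments are deliberately ignored here::

    def release_key(version, width=4):
        """Numeric, zero-padded key for a bare release segment (N[.N]+)."""
        parts = [int(piece) for piece in version.split(".")]
        return tuple(parts + [0] * (width - len(parts)))

    releases = ["2.0.1", "1.1", "0.9", "2.0", "1.0"]
    assert sorted(releases, key=release_key) == ["0.9", "1.0", "1.1", "2.0", "2.0.1"]

    # A date based version translated with a leading "0." prefix (for
    # example, a hypothetical 2013.06.22 release) sorts before 1.0:
    assert release_key("0.2013.6.22") < release_key("1.0")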
+Olson database versioning +~~~~~~~~~~~~~~~~~~~~~~~~~ + +The ``pytz`` project inherits its versioning scheme from the corresponding +Olson timezone database versioning scheme: the year followed by a lowercase +character indicating the version of the database within that year. + +This can be translated to a compliant 3-part version identifier as +``0..``, where the serial starts at zero (for the 'a' +release) and is incremented with each subsequent database update within the +year. + +As with other translated version identifiers, the corresponding Olson +database version would be recorded in the source label field. + + Version specifiers ================== @@ -521,7 +550,6 @@ * ``~=``: `Compatible release`_ clause * ``==``: `Version matching`_ clause * ``!=``: `Version exclusion`_ clause -* ``is``: `Build reference`_ clause * ``<=``, ``>=``: `Inclusive ordered comparison`_ clause * ``<``, ``>``: `Exclusive ordered comparison`_ clause @@ -605,6 +633,11 @@ release segment to ensure the release segments are compared with the same length. +Whether or not strict version matching is appropriate depends on the specific +use case for the version specifier. Automated tools SHOULD at least issue +warnings and MAY reject them entirely when strict version matches are used +inappropriately. + Prefix matching may be requested instead of strict comparison, by appending a trailing ``.*`` to the version identifier in the version matching clause. This means that additional trailing segments will be ignored when @@ -626,10 +659,6 @@ dependencies for repeatable *deployments of applications* while using a shared distribution index. -Publication tools and index servers SHOULD at least emit a warning when -dependencies are pinned in this fashion and MAY refuse to allow publication -of such overly specific dependencies. - Version exclusion ----------------- @@ -649,74 +678,6 @@ != 1.1.* # Same prefix, so 1.1.post1 does not match clause -Build reference ---------------- - -A build reference includes the build reference operator ``is`` and -a build label or a build URL. - -Publication tools and public index servers SHOULD NOT permit build -references in dependency specifications. - -Installation tools SHOULD support the use of build references to identify -dependencies. - -Build label matching works solely on strict equality comparisons: the -candidate build label must be exactly the same as the build label in the -version clause for the clause to match the candidate distribution. - -For example, a build reference could be used to depend on a ``hashdist`` -generated build of ``zlib`` with the ``hashdist`` hash used as a build -label:: - - zlib (is d4jwf2sb2g6glprsdqfdpcracwpzujwq) - -A build URL is distinguished from a build label by the presence of -``:`` and ``/`` characters in the build reference. As these characters -are not permitted in build labels, they indicate that the reference uses -a build URL. - -Some appropriate targets for a build URL are a binary archive, a -source tarball, an sdist archive or a direct reference to a tag or -specific commit in an online version control system. The exact URLs and -targets supported will be installation tool specific. - -For example, a local prebuilt wheel file may be referenced directly:: - - exampledist (is file:///localbuilds/exampledist-1.0-py33-none-any.whl) - -All build URL references SHOULD either specify a local file URL, a secure -transport mechanism (such as ``https``) or else include an expected hash -value in the URL for verification purposes. 
If an insecure network -transport is specified without any hash information (or with hash -information that the tool doesn't understand), automated tools SHOULD -at least emit a warning and MAY refuse to rely on the URL. - -It is RECOMMENDED that only hashes which are unconditionally provided by -the latest version of the standard library's ``hashlib`` module be used -for source archive hashes. At time of writing, that list consists of -``'md5'``, ``'sha1'``, ``'sha224'``, ``'sha256'``, ``'sha384'``, and -``'sha512'``. - -For binary or source archive references, an expected hash value may be -specified by including a ``=`` as part of -the URL fragment. - -For version control references, the ``VCS+protocol`` scheme SHOULD be -used to identify both the version control system and the secure transport. - -To support version control systems that do not support including commit or -tag references directly in the URL, that information may be appended to the -end of the URL using the ``@`` notation. - -The use of ``is`` when defining dependencies for published distributions -is strongly discouraged as it greatly complicates the deployment of -security fixes. The build label matching operator is intended primarily -for use when defining dependencies for repeatable *deployments of -applications* while using a shared distribution index, as well as to -reference dependencies which are not published through an index server. - - Inclusive ordered comparison ---------------------------- @@ -755,62 +716,108 @@ ------------------------ Pre-releases of any kind, including developmental releases, are implicitly -excluded from all version specifiers, *unless* a pre-release or developmental -release is explicitly mentioned in one of the clauses. For example, these -specifiers implicitly exclude all pre-releases and development -releases of later versions:: - - 2.2 - >= 1.0 - -While these specifiers would include at least some of them:: - - 2.2.dev0 - 2.2, != 2.3b2 - >= 1.0a1 - >= 1.0c1 - >= 1.0, != 1.0b2 - >= 1.0, < 2.0.dev123 +excluded from all version specifiers, *unless* they are already present +on the system, explicitly requested by the user, or if the only available +version that satisfies the version specifier is a pre-release. By default, dependency resolution tools SHOULD: * accept already installed pre-releases for all version specifiers -* accept remotely available pre-releases for version specifiers which - include at least one version clauses that references a pre-release +* accept remotely available pre-releases for version specifiers where + there is no final or post release that satisfies the version specifier * exclude all other pre-releases from consideration +Dependency resolution tools MAY issue a warning if a pre-release is needed +to satisfy a version specifier. + Dependency resolution tools SHOULD also allow users to request the following alternative behaviours: * accepting pre-releases for all version specifiers * excluding pre-releases for all version specifiers (reporting an error or - warning if a pre-release is already installed locally) + warning if a pre-release is already installed locally, or if a + pre-release is the only way to satisfy a particular specifier) Dependency resolution tools MAY also allow the above behaviour to be controlled on a per-distribution basis. -Post-releases and purely numeric releases receive no special treatment in -version specifiers - they are always included unless explicitly excluded. 
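A sketch of the default pre-release handling described above; the candidate model is deliberately simplified and the helper names are invented for illustration::

    import warnings

    def choose_candidates(candidates, is_prerelease, installed=()):
        """Default policy: keep final/post releases when any exist (plus any
        pre-release that is already installed); fall back to pre-releases,
        with a warning, only when nothing else satisfies the specifier."""
        finals = [c for c in candidates if not is_prerelease(c)]
        if finals:
            return finals + [c for c in candidates if c in installed]
        if candidates:
            warnings.warn("only pre-releases satisfy this specifier")
        return list(candidates)

    pre = {"2.0a1", "2.0b2"}.__contains__
    assert choose_candidates(["1.9", "2.0a1"], pre) == ["1.9"]
    assert choose_candidates(["2.0a1", "2.0b2"], pre) == ["2.0a1", "2.0b2"]
    assert choose_candidates(["1.9", "2.0a1"], pre,
                             installed={"2.0a1"}) == ["1.9", "2.0a1"]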
+Post-releases and final releases receive no special treatment in version +specifiers - they are always included unless explicitly excluded. Examples -------- -* ``3.1``: version 3.1 or later, but not - version 4.0 or later. Excludes pre-releases and developmental releases. -* ``3.1.2``: version 3.1.2 or later, but not - version 3.2.0 or later. Excludes pre-releases and developmental releases. -* ``3.1a1``: version 3.1a1 or later, but not - version 4.0 or later. Allows pre-releases like 3.2a4 and developmental - releases like 3.2.dev1. +* ``3.1``: version 3.1 or later, but not version 4.0 or later. +* ``3.1.2``: version 3.1.2 or later, but not version 3.2.0 or later. +* ``3.1a1``: version 3.1a1 or later, but not version 4.0 or later. * ``== 3.1``: specifically version 3.1 (or 3.1.0), excludes all pre-releases, post releases, developmental releases and any 3.1.x maintenance releases. -* ``== 3.1.*``: any version that starts with 3.1, excluding pre-releases and - developmental releases. Equivalent to the ``3.1.0`` compatible release - clause. +* ``== 3.1.*``: any version that starts with 3.1. Equivalent to the + ``3.1.0`` compatible release clause. * ``3.1.0, != 3.1.3``: version 3.1.0 or later, but not version 3.1.3 and - not version 3.2.0 or later. Excludes pre-releases and developmental - releases. + not version 3.2.0 or later. + + +Direct references +================= + +Some automated tools may permit the use of a direct reference as an +alternative to a normal version specifier. A direct reference consists of +the word ``from`` and an explicit URL. + +Whether or not direct references are appropriate depends on the specific +use case for the version specifier. Automated tools SHOULD at least issue +warnings and MAY reject them entirely when direct references are used +inappropriately. + +Public index servers SHOULD NOT allow the use of direct references in +uploaded distributions. Direct references are intended as a tool for +software integrators rather than publishers. + +Depending on the use case, some appropriate targets for a direct URL +reference may be a valid ``source_url`` entry (see PEP 426), an sdist, or +a wheel binary archive. The exact URLs and targets supported will be tool +dependent. + +For example, a local source archive may be referenced directly:: + + pip (from file:///localbuilds/pip-1.3.1.zip) + +Alternatively, a prebuilt archive may also be referenced:: + + pip (from file:///localbuilds/pip-1.3.1-py33-none-any.whl) + +All direct references that do not refer to a local file URL SHOULD +specify a secure transport mechanism (such as ``https``), include an +expected hash value in the URL for verification purposes, or both. If an +insecure transport is specified without any hash information, with hash +information that the tool doesn't understand, or with a selected hash +algorithm that the tool considers too weak to trust, automated tools +SHOULD at least emit a warning and MAY refuse to rely on the URL. + +It is RECOMMENDED that only hashes which are unconditionally provided by +the latest version of the standard library's ``hashlib`` module be used +for source archive hashes. At time of writing, that list consists of +``'md5'``, ``'sha1'``, ``'sha224'``, ``'sha256'``, ``'sha384'``, and +``'sha512'``. + +For source archive and wheel references, an expected hash value may be +specified by including a ``=`` entry as +part of the URL fragment. 
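As an illustration of the hash fragment convention just described, a minimal verification sketch using only the standard library; the URL and payload here are made up::

    import hashlib
    import urllib.parse

    def check_fragment_hash(url, data):
        """Verify bytes against a 'hash-algorithm=expected-hash' URL fragment."""
        fragment = urllib.parse.urlsplit(url).fragment
        if not fragment or "=" not in fragment:
            return True                      # nothing to verify against
        algorithm, _, expected = fragment.partition("=")
        if algorithm not in hashlib.algorithms_guaranteed:
            raise ValueError("unsupported hash algorithm: %r" % algorithm)
        return hashlib.new(algorithm, data).hexdigest() == expected

    payload = b"not really a release archive"
    url = ("https://example.com/pip-1.3.1.zip#sha256="
           + hashlib.sha256(payload).hexdigest())
    assert check_fragment_hash(url, payload)
    assert not check_fragment_hash(url, b"tampered archive")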
+ +Version control references, the ``VCS+protocol`` scheme SHOULD be +used to identify both the version control system and the secure transport. + +To support version control systems that do not support including commit or +tag references directly in the URL, that information may be appended to the +end of the URL using the ``@`` notation. + +Remote URL examples:: + + pip (from https://github.com/pypa/pip/archive/1.3.1.zip) + pip (from http://github.com/pypa/pip/archive/1.3.1.zip#sha1=da9234ee9982d4bbb3c72346a6de940a148ea686) + pip (from git+https://github.com/pypa/pip.git at 1.3.1) Updating the versioning specification @@ -823,56 +830,45 @@ versioning scheme and metadata version defined in new PEPs. -Open issues -=========== - -* The new ``is`` operator seems like a reasonable way to cleanly allow - installation tools to bring in non-published dependencies, while heavily - discouraging the practice for published libraries. It also makes - build labels more useful by allowing them to be used to pin dependencies - in the integration use case. - - However, it's an early draft of the idea, so feedback is definitely - welcome. - - Summary of differences from \PEP 386 ==================================== * Moved the description of version specifiers into the versioning PEP -* added the "build label" concept to better handle projects that wish to +* Added the "source label" concept to better handle projects that wish to use a non-compliant versioning scheme internally, especially those based on DVCS hashes - -* added the "compatible release" clause -* added the "build reference" clause +* Added the "direct reference" concept as a standard notation for direct + references to resources (rather than each tool needing to invents its own) -* added the trailing wildcard syntax for prefix based version matching +* Added the "compatible release" clause + +* Added the trailing wildcard syntax for prefix based version matching and exclusion -* changed the top level sort position of the ``.devN`` suffix +* Changed the top level sort position of the ``.devN`` suffix -* allowed single value version numbers +* Allowed single value version numbers -* explicit exclusion of leading or trailing whitespace +* Explicit exclusion of leading or trailing whitespace -* explicit criterion for the exclusion of date based versions +* Explicit criterion for the exclusion of date based versions -* implicitly exclude pre-releases unless explicitly requested +* Implicitly exclude pre-releases unless they're already present or + needed to satisfy a dependency -* treat post releases the same way as unqualified releases +* Treat post releases the same way as unqualified releases * Discuss ordering and dependencies across metadata versions The rationale for major changes is given in the following sections. -Adding build labels -------------------- +Adding source labels +-------------------- -The new build label support is intended to make it clearer that the +The new source label support is intended to make it clearer that the constraints on public version identifiers are there primarily to aid in the creation of reliable automated dependency analysis tools. Projects are free to use whatever versioning scheme they like internally, so long @@ -1011,11 +1007,12 @@ specifiers for no adequately justified reason. The updated interpretation is intended to make it difficult to accidentally -accept a pre-release version as satisfying a dependency, while allowing -pre-release versions to be explicitly requested when needed. 
+accept a pre-release version as satisfying a dependency, while still +allowing pre-release versions to be retrieved automatically when that's the +only way to satisfy a dependency. The "some forward compatibility assumed" default version constraint is -taken directly from the Ruby community's "pessimistic version constraint" +derived from the Ruby community's "pessimistic version constraint" operator [2]_ to allow projects to take a cautious approach to forward compatibility promises, while still easily setting a minimum required version for their dependencies. It is made the default behaviour rather @@ -1038,16 +1035,26 @@ The trailing wildcard syntax to request prefix based version matching was added to make it possible to sensibly define both compatible release clauses -and the desired pre-release handling semantics for ``<`` and ``>`` ordered -comparison clauses. +and the desired pre- and post-release handling semantics for ``<`` and ``>`` +ordered comparison clauses. -Build references are added for two purposes. In conjunction with build -labels, they allow hash based references, such as those employed by -`hashdist `__, -or generated from version control. In conjunction with build URLs, they -allow the new metadata standard to natively support an existing feature of -``pip``, which allows arbitrary URLs like -``file:///localbuilds/exampledist-1.0-py33-none-any.whl``. + +Adding direct references +------------------------ + +Direct references are added as an "escape clause" to handle messy real +world situations that don't map neatly to the standard distribution model. +This includes dependencies on unpublished software for internal use, as well +as handling the more complex compatibility issues that may arise when +wrapping third party libraries as C extensions (this is of especial concern +to the scientific community). + +Index servers are deliberately given a lot of freedom to disallow direct +references, since they're intended primarily as a tool for integrators +rather than publishers. PyPI in particular is currently going through the +process of *eliminating* dependencies on external references, as unreliable +external services have the effect of slowing down installation operations, +as well as reducing PyPI's own apparent reliability. References diff --git a/pep-0442.txt b/pep-0442.txt --- a/pep-0442.txt +++ b/pep-0442.txt @@ -4,13 +4,13 @@ Last-Modified: $Date$ Author: Antoine Pitrou BDFL-Delegate: Benjamin Peterson -Status: Draft +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 2013-05-18 Python-Version: 3.4 Post-History: 2013-05-18 -Resolution: TBD +Resolution: http://mail.python.org/pipermail/python-dev/2013-June/126746.html Abstract @@ -201,8 +201,7 @@ -------------- Following this scheme, an object's finalizer is always called exactly -once. The only exception is if an object is resurrected: the finalizer -will be called again when the object becomes unreachable again. +once, even if it was resurrected afterwards. For CI objects, the order in which finalizers are called (step 2 above) is undefined. 
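The behavioural change can be observed directly: with the semantics specified here (Python 3.4 and later), resurrecting an object from its finalizer does not lead to a second finalizer call when the object later becomes unreachable again::

    import gc

    calls = []
    survivors = []

    class Resurrecting:
        def __del__(self):
            calls.append("finalized")
            survivors.append(self)      # resurrect the object

    obj = Resurrecting()
    del obj                             # finalizer runs and resurrects
    gc.collect()
    assert calls == ["finalized"]

    survivors.clear()                   # drop the object a second time ...
    gc.collect()
    assert calls == ["finalized"]       # ... but __del__ is not called again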
diff --git a/pep-0443.txt b/pep-0443.txt --- a/pep-0443.txt +++ b/pep-0443.txt @@ -4,11 +4,11 @@ Last-Modified: $Date$ Author: ?ukasz Langa Discussions-To: Python-Dev -Status: Draft +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 22-May-2013 -Post-History: 22-May-2013, 25-May-2013 +Post-History: 22-May-2013, 25-May-2013, 31-May-2013 Replaces: 245, 246, 3124 @@ -44,11 +44,14 @@ In addition, it is currently a common anti-pattern for Python code to inspect the types of received arguments, in order to decide what to do -with the objects. For example, code may wish to accept either an object +with the objects. + +For example, code may wish to accept either an object of some type, or a sequence of objects of that type. +Currently, the "obvious way" to do this is by type inspection, but this +is brittle and closed to extension. -Currently, the "obvious way" to do this is by type inspection, but this -is brittle and closed to extension. Abstract Base Classes make it easier +Abstract Base Classes make it easier to discover present behaviour, but don't help adding new behaviour. A developer using an already-written library may be unable to change how their objects are treated by such code, especially if the objects they @@ -63,7 +66,7 @@ To define a generic function, decorate it with the ``@singledispatch`` decorator. Note that the dispatch happens on the type of the first -argument, create your function accordingly:: +argument. Create your function accordingly:: >>> from functools import singledispatch >>> @singledispatch @@ -73,7 +76,7 @@ ... print(arg) To add overloaded implementations to the function, use the -``register()`` attribute of the generic function. It is a decorator, +``register()`` attribute of the generic function. This is a decorator, taking a type parameter and decorating a function implementing the operation for that type:: @@ -98,7 +101,7 @@ ... >>> fun.register(type(None), nothing) -The ``register()`` attribute returns the undecorated function which +The ``register()`` attribute returns the undecorated function. This enables decorator stacking, pickling, as well as creating unit tests for each variant independently:: @@ -134,13 +137,17 @@ Where there is no registered implementation for a specific type, its method resolution order is used to find a more generic implementation. +The original function decorated with ``@singledispatch`` is registered +for the base ``object`` type, which means it is used if no better +implementation is found. + To check which implementation will the generic function choose for a given type, use the ``dispatch()`` attribute:: >>> fun.dispatch(float) - >>> fun.dispatch(dict) - + >>> fun.dispatch(dict) # note: default implementation + To access all registered implementations, use the read-only ``registry`` attribute:: @@ -152,7 +159,7 @@ >>> fun.registry[float] >>> fun.registry[object] - + The proposed API is intentionally limited and opinionated, as to ensure it is easy to explain and use, as well as to maintain consistency with @@ -168,12 +175,12 @@ reference implementation is available on hg.python.org [#ref-impl]_. The dispatch type is specified as a decorator argument. An alternative -form using function annotations has been considered but its inclusion -has been deferred. As of May 2013, this usage pattern is out of scope -for the standard library [#pep-0008]_ and the best practices for +form using function annotations was considered but its inclusion +has been rejected. 
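A compact, self-contained version of the generic function built up in the excerpts above, using the decorator-argument registration form the PEP adopts (runnable on Python 3.4 with ``functools.singledispatch``)::

    from functools import singledispatch

    @singledispatch
    def fun(arg, verbose=False):
        if verbose:
            print("Let me just say,", end=" ")
        print(arg)

    @fun.register(int)
    def _(arg, verbose=False):
        if verbose:
            print("Strength in numbers, eh?", end=" ")
        print(arg)

    @fun.register(list)
    def _(arg, verbose=False):
        if verbose:
            print("Enumerate this:")
        for i, elem in enumerate(arg):
            print(i, elem)

    fun("Hello, world.")                    # base implementation
    fun(42, verbose=True)                   # int implementation
    fun(["spam", "eggs"], verbose=True)     # list implementation

    # dict has no registration of its own, so it falls back to object:
    assert fun.dispatch(dict) is fun.dispatch(object)
    assert int in fun.registry and object in fun.registry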
As of May 2013, this usage pattern is out of scope +for the standard library [#pep-0008]_, and the best practices for annotation usage are still debated. -Based on the current ``pkgutil.simplegeneric`` implementation and +Based on the current ``pkgutil.simplegeneric`` implementation, and following the convention on registering virtual subclasses on Abstract Base Classes, the dispatch registry will not be thread-safe. @@ -186,48 +193,37 @@ importantly, it introduces support for Abstract Base Classes (ABC). When a generic function implementation is registered for an ABC, the -dispatch algorithm switches to a mode of MRO calculation for the -provided argument which includes the relevant ABCs. The algorithm is as -follows:: +dispatch algorithm switches to an extended form of C3 linearization, +which includes the relevant ABCs in the MRO of the provided argument. +The algorithm inserts ABCs where their functionality is introduced, i.e. +``issubclass(cls, abc)`` returns ``True`` for the class itself but +returns ``False`` for all its direct base classes. Implicit ABCs for +a given class (either registered or inferred from the presence of +a special method like ``__len__()``) are inserted directly after the +last ABC explicitly listed in the MRO of said class. - def _compose_mro(cls, haystack): - """Calculates the MRO for a given class `cls`, including relevant - abstract base classes from `haystack`.""" - bases = set(cls.__mro__) - mro = list(cls.__mro__) - for regcls in haystack: - if regcls in bases or not issubclass(cls, regcls): - continue # either present in the __mro__ or unrelated - for index, base in enumerate(mro): - if not issubclass(base, regcls): - break - if base in bases and not issubclass(regcls, base): - # Conflict resolution: put classes present in __mro__ - # and their subclasses first. - index += 1 - mro.insert(index, regcls) - return mro - -In its most basic form, it returns the MRO for the given type:: +In its most basic form, this linearization returns the MRO for the given +type:: >>> _compose_mro(dict, []) [, ] -When the haystack consists of ABCs that the specified type is a subclass -of, they are inserted in a predictable order:: +When the second argument contains ABCs that the specified type is +a subclass of, they are inserted in a predictable order:: >>> _compose_mro(dict, [Sized, MutableMapping, str, ... Sequence, Iterable]) [, , - , , + , , + , , ] While this mode of operation is significantly slower, all dispatch decisions are cached. The cache is invalidated on registering new implementations on the generic function or when user code calls -``register()`` on an ABC to register a new virtual subclass. In the -latter case, it is possible to create a situation with ambiguous -dispatch, for instance:: +``register()`` on an ABC to implicitly subclass it. In the latter case, +it is possible to create a situation with ambiguous dispatch, for +instance:: >>> from collections import Iterable, Container >>> class P: @@ -254,27 +250,45 @@ RuntimeError: Ambiguous dispatch: or -Note that this exception would not be raised if ``Iterable`` and -``Container`` had been provided as base classes during class definition. -In this case dispatch happens in the MRO order:: +Note that this exception would not be raised if one or more ABCs had +been provided explicitly as base classes during class definition. In +this case dispatch happens in the MRO order:: >>> class Ten(Iterable, Container): ... def __iter__(self): ... for i in range(10): ... yield i ... 
def __contains__(self, value): - ... return value in range(10) + ... return value in range(10) ... >>> g(Ten()) 'iterable' +A similar conflict arises when subclassing an ABC is inferred from the +presence of a special method like ``__len__()`` or ``__contains__()``:: + + >>> class Q: + ... def __contains__(self, value): + ... return False + ... + >>> issubclass(Q, Container) + True + >>> Iterable.register(Q) + >>> g(Q()) + Traceback (most recent call last): + ... + RuntimeError: Ambiguous dispatch: + or + +An early version of the PEP contained a custom approach that was simpler +but created a number of edge cases with surprising results [#why-c3]_. Usage Patterns ============== This PEP proposes extending behaviour only of functions specifically marked as generic. Just as a base class method may be overridden by -a subclass, so too may a function be overloaded to provide custom +a subclass, so too a function may be overloaded to provide custom functionality for a given type. Universal overloading does not equal *arbitrary* overloading, in the @@ -371,6 +385,8 @@ a particular annotation style". (http://www.python.org/dev/peps/pep-0008) +.. [#why-c3] http://bugs.python.org/issue18244 + .. [#pep-3124] http://www.python.org/dev/peps/pep-3124/ .. [#peak-rules] http://peak.telecommunity.com/DevCenter/PEAK_2dRules diff --git a/pep-0445.txt b/pep-0445.txt new file mode 100644 --- /dev/null +++ b/pep-0445.txt @@ -0,0 +1,773 @@ +PEP: 445 +Title: Add new APIs to customize Python memory allocators +Version: $Revision$ +Last-Modified: $Date$ +Author: Victor Stinner +BDFL-Delegate: Antoine Pitrou +Status: Accepted +Type: Standards Track +Content-Type: text/x-rst +Created: 15-june-2013 +Python-Version: 3.4 +Resolution: http://mail.python.org/pipermail/python-dev/2013-July/127222.html + +Abstract +======== + +This PEP proposes new Application Programming Interfaces (API) to customize +Python memory allocators. The only implementation required to conform to +this PEP is CPython, but other implementations may choose to be compatible, +or to re-use a similar scheme. + + +Rationale +========= + +Use cases: + +* Applications embedding Python which want to isolate Python memory from + the memory of the application, or want to use a different memory + allocator optimized for its Python usage +* Python running on embedded devices with low memory and slow CPU. + A custom memory allocator can be used for efficiency and/or to get + access all the memory of the device. +* Debug tools for memory allocators: + + - track the memory usage (find memory leaks) + - get the location of a memory allocation: Python filename and line + number, and the size of a memory block + - detect buffer underflow, buffer overflow and misuse of Python + allocator APIs (see `Redesign Debug Checks on Memory Block + Allocators as Hooks`_) + - force memory allocations to fail to test handling of the + ``MemoryError`` exception + + +Proposal +======== + +New Functions and Structures +---------------------------- + +* Add a new GIL-free (no need to hold the GIL) memory allocator: + + - ``void* PyMem_RawMalloc(size_t size)`` + - ``void* PyMem_RawRealloc(void *ptr, size_t new_size)`` + - ``void PyMem_RawFree(void *ptr)`` + - The newly allocated memory will not have been initialized in any + way. + - Requesting zero bytes returns a distinct non-*NULL* pointer if + possible, as if ``PyMem_Malloc(1)`` had been called instead. 
+ +* Add a new ``PyMemAllocator`` structure:: + + typedef struct { + /* user context passed as the first argument to the 3 functions */ + void *ctx; + + /* allocate a memory block */ + void* (*malloc) (void *ctx, size_t size); + + /* allocate or resize a memory block */ + void* (*realloc) (void *ctx, void *ptr, size_t new_size); + + /* release a memory block */ + void (*free) (void *ctx, void *ptr); + } PyMemAllocator; + +* Add a new ``PyMemAllocatorDomain`` enum to choose the Python + allocator domain. Domains: + + - ``PYMEM_DOMAIN_RAW``: ``PyMem_RawMalloc()``, ``PyMem_RawRealloc()`` + and ``PyMem_RawFree()`` + + - ``PYMEM_DOMAIN_MEM``: ``PyMem_Malloc()``, ``PyMem_Realloc()`` and + ``PyMem_Free()`` + + - ``PYMEM_DOMAIN_OBJ``: ``PyObject_Malloc()``, ``PyObject_Realloc()`` + and ``PyObject_Free()`` + +* Add new functions to get and set memory block allocators: + + - ``void PyMem_GetAllocator(PyMemAllocatorDomain domain, PyMemAllocator *allocator)`` + - ``void PyMem_SetAllocator(PyMemAllocatorDomain domain, PyMemAllocator *allocator)`` + - The new allocator must return a distinct non-*NULL* pointer when + requesting zero bytes + - For the ``PYMEM_DOMAIN_RAW`` domain, the allocator must be + thread-safe: the GIL is not held when the allocator is called. + +* Add a new ``PyObjectArenaAllocator`` structure:: + + typedef struct { + /* user context passed as the first argument to the 2 functions */ + void *ctx; + + /* allocate an arena */ + void* (*alloc) (void *ctx, size_t size); + + /* release an arena */ + void (*free) (void *ctx, void *ptr, size_t size); + } PyObjectArenaAllocator; + +* Add new functions to get and set the arena allocator used by + *pymalloc*: + + - ``void PyObject_GetArenaAllocator(PyObjectArenaAllocator *allocator)`` + - ``void PyObject_SetArenaAllocator(PyObjectArenaAllocator *allocator)`` + +* Add a new function to reinstall the debug checks on memory allocators when + a memory allocator is replaced with ``PyMem_SetAllocator()``: + + - ``void PyMem_SetupDebugHooks(void)`` + - Install the debug hooks on all memory block allocators. The function can be + called more than once, hooks are only installed once. + - The function does nothing is Python is not compiled in debug mode. + +* Memory block allocators always return *NULL* if *size* is greater than + ``PY_SSIZE_T_MAX``. The check is done before calling the inner + function. + +.. note:: + The *pymalloc* allocator is optimized for objects smaller than 512 bytes + with a short lifetime. It uses memory mappings with a fixed size of 256 + KB called "arenas". + +Here is how the allocators are set up by default: + +* ``PYMEM_DOMAIN_RAW``, ``PYMEM_DOMAIN_MEM``: ``malloc()``, + ``realloc()`` and ``free()``; call ``malloc(1)`` when requesting zero + bytes +* ``PYMEM_DOMAIN_OBJ``: *pymalloc* allocator which falls back on + ``PyMem_Malloc()`` for allocations larger than 512 bytes +* *pymalloc* arena allocator: ``VirtualAlloc()`` and ``VirtualFree()`` on + Windows, ``mmap()`` and ``munmap()`` when available, or ``malloc()`` + and ``free()`` + + +Redesign Debug Checks on Memory Block Allocators as Hooks +--------------------------------------------------------- + +Since Python 2.3, Python implements different checks on memory +allocators in debug mode: + +* Newly allocated memory is filled with the byte ``0xCB``, freed memory + is filled with the byte ``0xDB``. 
+* Detect API violations, ex: ``PyObject_Free()`` called on a memory + block allocated by ``PyMem_Malloc()`` +* Detect write before the start of the buffer (buffer underflow) +* Detect write after the end of the buffer (buffer overflow) + +In Python 3.3, the checks are installed by replacing ``PyMem_Malloc()``, +``PyMem_Realloc()``, ``PyMem_Free()``, ``PyObject_Malloc()``, +``PyObject_Realloc()`` and ``PyObject_Free()`` using macros. The new +allocator allocates a larger buffer and writes a pattern to detect buffer +underflow, buffer overflow and use after free (by filling the buffer with +the byte ``0xDB``). It uses the original ``PyObject_Malloc()`` +function to allocate memory. So ``PyMem_Malloc()`` and +``PyMem_Realloc()`` indirectly call``PyObject_Malloc()`` and +``PyObject_Realloc()``. + +This PEP redesigns the debug checks as hooks on the existing allocators +in debug mode. Examples of call traces without the hooks: + +* ``PyMem_RawMalloc()`` => ``_PyMem_RawMalloc()`` => ``malloc()`` +* ``PyMem_Realloc()`` => ``_PyMem_RawRealloc()`` => ``realloc()`` +* ``PyObject_Free()`` => ``_PyObject_Free()`` + +Call traces when the hooks are installed (debug mode): + +* ``PyMem_RawMalloc()`` => ``_PyMem_DebugMalloc()`` + => ``_PyMem_RawMalloc()`` => ``malloc()`` +* ``PyMem_Realloc()`` => ``_PyMem_DebugRealloc()`` + => ``_PyMem_RawRealloc()`` => ``realloc()`` +* ``PyObject_Free()`` => ``_PyMem_DebugFree()`` + => ``_PyObject_Free()`` + +As a result, ``PyMem_Malloc()`` and ``PyMem_Realloc()`` now call +``malloc()`` and ``realloc()`` in both release mode and debug mode, +instead of calling ``PyObject_Malloc()`` and ``PyObject_Realloc()`` in +debug mode. + +When at least one memory allocator is replaced with +``PyMem_SetAllocator()``, the ``PyMem_SetupDebugHooks()`` function must +be called to reinstall the debug hooks on top on the new allocator. + + +Don't call malloc() directly anymore +------------------------------------ + +``PyObject_Malloc()`` falls back on ``PyMem_Malloc()`` instead of +``malloc()`` if size is greater or equal than 512 bytes, and +``PyObject_Realloc()`` falls back on ``PyMem_Realloc()`` instead of +``realloc()`` + +Direct calls to ``malloc()`` are replaced with ``PyMem_Malloc()``, or +``PyMem_RawMalloc()`` if the GIL is not held. + +External libraries like zlib or OpenSSL can be configured to allocate memory +using ``PyMem_Malloc()`` or ``PyMem_RawMalloc()``. If the allocator of a +library can only be replaced globally (rather than on an object-by-object +basis), it shouldn't be replaced when Python is embedded in an application. + +For the "track memory usage" use case, it is important to track memory +allocated in external libraries to have accurate reports, because these +allocations can be large (e.g. they can raise a ``MemoryError`` exception) +and would otherwise be missed in memory usage reports. 
+ + +Examples +======== + +Use case 1: Replace Memory Allocators, keep pymalloc +---------------------------------------------------- + +Dummy example wasting 2 bytes per memory block, +and 10 bytes per *pymalloc* arena:: + + #include + + size_t alloc_padding = 2; + size_t arena_padding = 10; + + void* my_malloc(void *ctx, size_t size) + { + int padding = *(int *)ctx; + return malloc(size + padding); + } + + void* my_realloc(void *ctx, void *ptr, size_t new_size) + { + int padding = *(int *)ctx; + return realloc(ptr, new_size + padding); + } + + void my_free(void *ctx, void *ptr) + { + free(ptr); + } + + void* my_alloc_arena(void *ctx, size_t size) + { + int padding = *(int *)ctx; + return malloc(size + padding); + } + + void my_free_arena(void *ctx, void *ptr, size_t size) + { + free(ptr); + } + + void setup_custom_allocator(void) + { + PyMemAllocator alloc; + PyObjectArenaAllocator arena; + + alloc.ctx = &alloc_padding; + alloc.malloc = my_malloc; + alloc.realloc = my_realloc; + alloc.free = my_free; + + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); + PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &alloc); + /* leave PYMEM_DOMAIN_OBJ unchanged, use pymalloc */ + + arena.ctx = &arena_padding; + arena.alloc = my_alloc_arena; + arena.free = my_free_arena; + PyObject_SetArenaAllocator(&arena); + + PyMem_SetupDebugHooks(); + } + + +Use case 2: Replace Memory Allocators, override pymalloc +-------------------------------------------------------- + +If you have a dedicated allocator optimized for allocations of objects +smaller than 512 bytes with a short lifetime, pymalloc can be overriden +(replace ``PyObject_Malloc()``). + +Dummy example wasting 2 bytes per memory block:: + + #include + + size_t padding = 2; + + void* my_malloc(void *ctx, size_t size) + { + int padding = *(int *)ctx; + return malloc(size + padding); + } + + void* my_realloc(void *ctx, void *ptr, size_t new_size) + { + int padding = *(int *)ctx; + return realloc(ptr, new_size + padding); + } + + void my_free(void *ctx, void *ptr) + { + free(ptr); + } + + void setup_custom_allocator(void) + { + PyMemAllocator alloc; + alloc.ctx = &padding; + alloc.malloc = my_malloc; + alloc.realloc = my_realloc; + alloc.free = my_free; + + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); + PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &alloc); + PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); + + PyMem_SetupDebugHooks(); + } + +The *pymalloc* arena does not need to be replaced, because it is no more +used by the new allocator. + + +Use case 3: Setup Hooks On Memory Block Allocators +-------------------------------------------------- + +Example to setup hooks on all memory block allocators:: + + struct { + PyMemAllocator raw; + PyMemAllocator mem; + PyMemAllocator obj; + /* ... */ + } hook; + + static void* hook_malloc(void *ctx, size_t size) + { + PyMemAllocator *alloc = (PyMemAllocator *)ctx; + void *ptr; + /* ... */ + ptr = alloc->malloc(alloc->ctx, size); + /* ... */ + return ptr; + } + + static void* hook_realloc(void *ctx, void *ptr, size_t new_size) + { + PyMemAllocator *alloc = (PyMemAllocator *)ctx; + void *ptr2; + /* ... */ + ptr2 = alloc->realloc(alloc->ctx, ptr, new_size); + /* ... */ + return ptr2; + } + + static void hook_free(void *ctx, void *ptr) + { + PyMemAllocator *alloc = (PyMemAllocator *)ctx; + /* ... */ + alloc->free(alloc->ctx, ptr); + /* ... 
*/ + } + + void setup_hooks(void) + { + PyMemAllocator alloc; + static int installed = 0; + + if (installed) + return; + installed = 1; + + alloc.malloc = hook_malloc; + alloc.realloc = hook_realloc; + alloc.free = hook_free; + PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &hook.raw); + PyMem_GetAllocator(PYMEM_DOMAIN_MEM, &hook.mem); + PyMem_GetAllocator(PYMEM_DOMAIN_OBJ, &hook.obj); + + alloc.ctx = &hook.raw; + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); + + alloc.ctx = &hook.mem; + PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &alloc); + + alloc.ctx = &hook.obj; + PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); + } + +.. note:: + ``PyMem_SetupDebugHooks()`` does not need to be called because + memory allocator are not replaced: the debug checks on memory + block allocators are installed automatically at startup. + + +Performances +============ + +The implementation of this PEP (issue #3329) has no visible overhead on +the Python benchmark suite. + +Results of the `Python benchmarks suite +`_ (-b 2n3): some tests are 1.04x +faster, some tests are 1.04 slower. Results of pybench microbenchmark: +"+0.1%" slower globally (diff between -4.9% and +5.6%). + +The full output of benchmarks is attached to the issue #3329. + + +Rejected Alternatives +===================== + +More specific functions to get/set memory allocators +---------------------------------------------------- + +It was originally proposed a larger set of C API functions, with one pair +of functions for each allocator domain: + +* ``void PyMem_GetRawAllocator(PyMemAllocator *allocator)`` +* ``void PyMem_GetAllocator(PyMemAllocator *allocator)`` +* ``void PyObject_GetAllocator(PyMemAllocator *allocator)`` +* ``void PyMem_SetRawAllocator(PyMemAllocator *allocator)`` +* ``void PyMem_SetAllocator(PyMemAllocator *allocator)`` +* ``void PyObject_SetAllocator(PyMemAllocator *allocator)`` + +This alternative was rejected because it is not possible to write +generic code with more specific functions: code must be duplicated for +each memory allocator domain. + + +Make PyMem_Malloc() reuse PyMem_RawMalloc() by default +------------------------------------------------------ + +If ``PyMem_Malloc()`` called ``PyMem_RawMalloc()`` by default, +calling ``PyMem_SetAllocator(PYMEM_DOMAIN_RAW, alloc)`` would also +patch ``PyMem_Malloc()`` indirectly. + +This alternative was rejected because ``PyMem_SetAllocator()`` would +have a different behaviour depending on the domain. Always having the +same behaviour is less error-prone. + + +Add a new PYDEBUGMALLOC environment variable +-------------------------------------------- + +It was proposed to add a new ``PYDEBUGMALLOC`` environment variable to +enable debug checks on memory block allocators. It would have had the same +effect as calling the ``PyMem_SetupDebugHooks()``, without the need +to write any C code. Another advantage is to allow to enable debug checks +even in release mode: debug checks would always be compiled in, but only +enabled when the environment variable is present and non-empty. + +This alternative was rejected because a new environment variable would +make Python initialization even more complex. `PEP 432 +`_ tries to simplify the +CPython startup sequence. + + +Use macros to get customizable allocators +----------------------------------------- + +To have no overhead in the default configuration, customizable +allocators would be an optional feature enabled by a configuration +option or by macros. 
+ +This alternative was rejected because the use of macros implies having +to recompile extensions modules to use the new allocator and allocator +hooks. Not having to recompile Python nor extension modules makes debug +hooks easier to use in practice. + + +Pass the C filename and line number +----------------------------------- + +Define allocator functions as macros using ``__FILE__`` and ``__LINE__`` +to get the C filename and line number of a memory allocation. + +Example of ``PyMem_Malloc`` macro with the modified +``PyMemAllocator`` structure:: + + typedef struct { + /* user context passed as the first argument + to the 3 functions */ + void *ctx; + + /* allocate a memory block */ + void* (*malloc) (void *ctx, const char *filename, int lineno, + size_t size); + + /* allocate or resize a memory block */ + void* (*realloc) (void *ctx, const char *filename, int lineno, + void *ptr, size_t new_size); + + /* release a memory block */ + void (*free) (void *ctx, const char *filename, int lineno, + void *ptr); + } PyMemAllocator; + + void* _PyMem_MallocTrace(const char *filename, int lineno, + size_t size); + + /* the function is still needed for the Python stable ABI */ + void* PyMem_Malloc(size_t size); + + #define PyMem_Malloc(size) \ + _PyMem_MallocTrace(__FILE__, __LINE__, size) + +The GC allocator functions would also have to be patched. For example, +``_PyObject_GC_Malloc()`` is used in many C functions and so objects of +different types would have the same allocation location. + +This alternative was rejected because passing a filename and a line +number to each allocator makes the API more complex: pass 3 new +arguments (ctx, filename, lineno) to each allocator function, instead of +just a context argument (ctx). Having to also modify GC allocator +functions adds too much complexity for a little gain. + + +GIL-free PyMem_Malloc() +----------------------- + +In Python 3.3, when Python is compiled in debug mode, ``PyMem_Malloc()`` +indirectly calls ``PyObject_Malloc()`` which requires the GIL to be +held (it isn't thread-safe). That's why ``PyMem_Malloc()`` must be called +with the GIL held. + +This PEP changes ``PyMem_Malloc()``: it now always calls ``malloc()`` +rather than ``PyObject_Malloc()``. The "GIL must be held" restriction +could therefore be removed from ``PyMem_Malloc()``. + +This alternative was rejected because allowing to call +``PyMem_Malloc()`` without holding the GIL can break applications +which setup their own allocators or allocator hooks. Holding the GIL is +convenient to develop a custom allocator: no need to care about other +threads. It is also convenient for a debug allocator hook: Python +objects can be safely inspected, and the C API may be used for reporting. + +Moreover, calling ``PyGILState_Ensure()`` in a memory allocator has +unexpected behaviour, especially at Python startup and when creating of a +new Python thread state. It is better to free custom allocators of +the responsibility of acquiring the GIL. + + +Don't add PyMem_RawMalloc() +--------------------------- + +Replace ``malloc()`` with ``PyMem_Malloc()``, but only if the GIL is +held. Otherwise, keep ``malloc()`` unchanged. + +The ``PyMem_Malloc()`` is used without the GIL held in some Python +functions. For example, the ``main()`` and ``Py_Main()`` functions of +Python call ``PyMem_Malloc()`` whereas the GIL do not exist yet. In this +case, ``PyMem_Malloc()`` would be replaced with ``malloc()`` (or +``PyMem_RawMalloc()``). 
+ +This alternative was rejected because ``PyMem_RawMalloc()`` is required +for accurate reports of the memory usage. When a debug hook is used to +track the memory usage, the memory allocated by direct calls to +``malloc()`` cannot be tracked. ``PyMem_RawMalloc()`` can be hooked and +so all the memory allocated by Python can be tracked, including +memory allocated without holding the GIL. + + +Use existing debug tools to analyze memory use +---------------------------------------------- + +There are many existing debug tools to analyze memory use. Some +examples: `Valgrind `_, `Purify +`_, `Clang AddressSanitizer +`_, `failmalloc +`_, etc. + +The problem is to retrieve the Python object related to a memory pointer +to read its type and/or its content. Another issue is to retrieve the +source of the memory allocation: the C backtrace is usually useless +(same reasoning than macros using ``__FILE__`` and ``__LINE__``, see +`Pass the C filename and line number`_), the Python filename and line +number (or even the Python traceback) is more useful. + +This alternative was rejected because classic tools are unable to +introspect Python internals to collect such information. Being able to +setup a hook on allocators called with the GIL held allows to collect a +lot of useful data from Python internals. + + +Add a msize() function +---------------------- + +Add another function to ``PyMemAllocator`` and +``PyObjectArenaAllocator`` structures:: + + size_t msize(void *ptr); + +This function returns the size of a memory block or a memory mapping. +Return (size_t)-1 if the function is not implemented or if the pointer +is unknown (ex: NULL pointer). + +On Windows, this function can be implemented using ``_msize()`` and +``VirtualQuery()``. + +The function can be used to implement a hook tracking the memory usage. +The ``free()`` method of an allocator only gets the address of a memory +block, whereas the size of the memory block is required to update the +memory usage. + +The additional ``msize()`` function was rejected because only few +platforms implement it. For example, Linux with the GNU libc does not +provide a function to get the size of a memory block. ``msize()`` is not +currently used in the Python source code. The function would only be +used to track memory use, and make the API more complex. A debug hook +can implement the function internally, there is no need to add it to +``PyMemAllocator`` and ``PyObjectArenaAllocator`` structures. + + +No context argument +------------------- + +Simplify the signature of allocator functions, remove the context +argument: + +* ``void* malloc(size_t size)`` +* ``void* realloc(void *ptr, size_t new_size)`` +* ``void free(void *ptr)`` + +It is likely for an allocator hook to be reused for +``PyMem_SetAllocator()`` and ``PyObject_SetAllocator()``, or even +``PyMem_SetRawAllocator()``, but the hook must call a different function +depending on the allocator. The context is a convenient way to reuse the +same custom allocator or hook for different Python allocators. + +In C++, the context can be used to pass *this*. + + +External Libraries +================== + +Examples of API used to customize memory allocators. 
+ +Libraries used by Python: + +* OpenSSL: `CRYPTO_set_mem_functions() + `_ + to set memory management functions globally +* expat: `parserCreate() + `_ + has a per-instance memory handler +* zlib: `zlib 1.2.8 Manual `_, + pass an opaque pointer +* bz2: `bzip2 and libbzip2, version 1.0.5 + `_, + pass an opaque pointer +* lzma: `LZMA SDK - How to Use + `_, + pass an opaque pointer +* lipmpdec: no opaque pointer (classic malloc API) + +Other libraries: + +* glib: `g_mem_set_vtable() + `_ +* libxml2: + `xmlGcMemSetup() `_, + global +* Oracle's OCI: `Oracle Call Interface Programmer's Guide, + Release 2 (9.2) + `_, + pass an opaque pointer + +The new *ctx* parameter of this PEP was inspired by the API of zlib and +Oracle's OCI libraries. + +See also the `GNU libc: Memory Allocation Hooks +`_ +which uses a different approach to hook memory allocators. + + +Memory Allocators +================= + +The C standard library provides the well known ``malloc()`` function. +Its implementation depends on the platform and of the C library. The GNU +C library uses a modified ptmalloc2, based on "Doug Lea's Malloc" +(dlmalloc). FreeBSD uses `jemalloc +`_. Google provides *tcmalloc* which +is part of `gperftools `_. + +``malloc()`` uses two kinds of memory: heap and memory mappings. Memory +mappings are usually used for large allocations (ex: larger than 256 +KB), whereas the heap is used for small allocations. + +On UNIX, the heap is handled by ``brk()`` and ``sbrk()`` system calls, +and it is contiguous. On Windows, the heap is handled by +``HeapAlloc()`` and can be discontiguous. Memory mappings are handled by +``mmap()`` on UNIX and ``VirtualAlloc()`` on Windows, they can be +discontiguous. + +Releasing a memory mapping gives back immediatly the memory to the +system. On UNIX, the heap memory is only given back to the system if the +released block is located at the end of the heap. Otherwise, the memory +will only be given back to the system when all the memory located after +the released memory is also released. + +To allocate memory on the heap, an allocator tries to reuse free space. +If there is no contiguous space big enough, the heap must be enlarged, +even if there is more free space than required size. This issue is +called the "memory fragmentation": the memory usage seen by the system +is higher than real usage. On Windows, ``HeapAlloc()`` creates +a new memory mapping with ``VirtualAlloc()`` if there is not enough free +contiguous memory. + +CPython has a *pymalloc* allocator for allocations smaller than 512 +bytes. This allocator is optimized for small objects with a short +lifetime. It uses memory mappings called "arenas" with a fixed size of +256 KB. + +Other allocators: + +* Windows provides a `Low-fragmentation Heap + `_. + +* The Linux kernel uses `slab allocation + `_. + +* The glib library has a `Memory Slice API + `_: + efficient way to allocate groups of equal-sized chunks of memory + +This PEP allows to choose exactly which memory allocator is used for your +application depending on its usage of the memory (number of allocations, +size of allocations, lifetime of objects, etc.). 
+ + +Links +===== + +CPython issues related to memory allocation: + +* `Issue #3329: Add new APIs to customize memory allocators + `_ +* `Issue #13483: Use VirtualAlloc to allocate memory arenas + `_ +* `Issue #16742: PyOS_Readline drops GIL and calls PyOS_StdioReadline, + which isn't thread safe `_ +* `Issue #18203: Replace calls to malloc() with PyMem_Malloc() or + PyMem_RawMalloc() `_ +* `Issue #18227: Use Python memory allocators in external libraries like + zlib or OpenSSL `_ + +Projects analyzing the memory usage of Python applications: + +* `pytracemalloc + `_ +* `Meliae: Python Memory Usage Analyzer + `_ +* `Guppy-PE: umbrella package combining Heapy and GSL + `_ +* `PySizer (developed for Python 2.4) + `_ + + +Copyright +========= + +This document has been placed into the public domain. + diff --git a/pep-0446.txt b/pep-0446.txt new file mode 100644 --- /dev/null +++ b/pep-0446.txt @@ -0,0 +1,248 @@ +PEP: 446 +Title: Add new parameters to configure the inheritance of files and for non-blocking sockets +Version: $Revision$ +Last-Modified: $Date$ +Author: Victor Stinner +Status: Draft +Type: Standards Track +Content-Type: text/x-rst +Created: 3-July-2013 +Python-Version: 3.4 + + +Abstract +======== + +This PEP proposes new portable parameters and functions to configure the +inheritance of file descriptors and the non-blocking flag of sockets. + + +Rationale +========= + +Inheritance of file descriptors +------------------------------- + +The inheritance of file descriptors in child processes can be configured +on each file descriptor using a *close-on-exec* flag. By default, the +close-on-exec flag is not set. + +On Windows, the close-on-exec flag is the inverse of ``HANDLE_FLAG_INHERIT``. File +descriptors are not inherited if the ``bInheritHandles`` parameter of +the ``CreateProcess()`` function is ``FALSE``, even if the +``HANDLE_FLAG_INHERIT`` flag is set. If ``bInheritHandles`` is ``TRUE``, +only file descriptors with ``HANDLE_FLAG_INHERIT`` flag set are +inherited, others are not. + +On UNIX, the close-on-exec flag is ``O_CLOEXEC``. File descriptors with +the ``O_CLOEXEC`` flag set are closed at the execution of a new program +(ex: when calling ``execv()``). + +The ``O_CLOEXEC`` flag has no effect on ``fork()``, all file descriptors +are inherited by the child process. Futhermore, most properties file +descriptors are shared between the parent and the child processes, +except file attributes which are duplicated (``O_CLOEXEC`` is the only +file attribute). Setting ``O_CLOEXEC`` flag of a file descriptor in the +child process does not change the ``O_CLOEXEC`` flag of the file +descriptor in the parent process. + + +Issues of the inheritance of file descriptors +--------------------------------------------- + +Inheritance of file descriptors causes issues. For example, closing a +file descriptor in the parent process does not release the resource +(file, socket, ...), because the file descriptor is still open in the +child process. + +Leaking file descriptors is also a major security vulnerability. An +untrusted child process can read sensitive data like passwords and take +control of the parent process though leaked file descriptors. It is for +example a known vulnerability to escape from a chroot. + + +Non-blocking sockets +-------------------- + +To handle multiple network clients in a single thread, a multiplexing +function like ``select()`` can be used. For best performances, sockets +must be configured as non-blocking. 
Operations like ``send()`` and +``recv()`` return an ``EAGAIN`` or ``EWOULDBLOCK`` error if the +operation would block. + +By default, newly created sockets are blocking. Setting the non-blocking +mode requires additional system calls. + +On UNIX, the blocking flag is ``O_NONBLOCK``: a pipe and a socket are +non-blocking if the ``O_NONBLOCK`` flag is set. + + +Setting flags at the creation of the file descriptor +---------------------------------------------------- + +Windows and recent versions of other operating systems like Linux +support setting the close-on-exec flag directly at the creation of file +descriptors, and close-on-exec and blocking flags at the creation of +sockets. + +Setting these flags at the creation is atomic and avoids additional +system calls. + + +Proposal +======== + +New cloexec And blocking Parameters +----------------------------------- + +Add a new optional *cloexec* on functions creating file descriptors: + +* ``io.FileIO`` +* ``io.open()`` +* ``open()`` +* ``os.dup()`` +* ``os.dup2()`` +* ``os.fdopen()`` +* ``os.open()`` +* ``os.openpty()`` +* ``os.pipe()`` +* ``select.devpoll()`` +* ``select.epoll()`` +* ``select.kqueue()`` + +Add new optional *cloexec* and *blocking* parameters to functions +creating sockets: + +* ``asyncore.dispatcher.create_socket()`` +* ``socket.socket()`` +* ``socket.socket.accept()`` +* ``socket.socket.dup()`` +* ``socket.socket.fromfd`` +* ``socket.socketpair()`` + +The default value of *cloexec* is ``False`` and the default value of +*blocking* is ``True``. + +The atomicity is not guaranteed. If the platform does not support +setting close-on-exec and blocking flags at the creation of the file +descriptor or socket, the flags are set using additional system calls. + + +New Functions +------------- + +Add new functions the get and set the close-on-exec flag of a file +descriptor, available on all platforms: + +* ``os.get_cloexec(fd:int) -> bool`` +* ``os.set_cloexec(fd:int, cloexec: bool)`` + +Add new functions the get and set the blocking flag of a file +descriptor, only available on UNIX: + +* ``os.get_blocking(fd:int) -> bool`` +* ``os.set_blocking(fd:int, blocking: bool)`` + + +Other Changes +------------- + +The ``subprocess.Popen`` class must clear the close-on-exec flag of file +descriptors of the ``pass_fds`` parameter. The flag is cleared in the +child process before executing the program; the change does not change +the flag in the parent process. + +The close-on-exec flag must also be set on private file descriptors and +sockets in the Python standard library. For example, on UNIX, +os.urandom() opens ``/dev/urandom`` to read some random bytes and the +file descriptor is closed at function exit. The file descriptor is not +expected to be inherited by child processes. + + +Rejected Alternatives +===================== + +PEP 433 +------- + +The PEP 433 entitled "Easier suppression of file descriptor inheritance" +is a previous attempt proposing various other alternatives, but no +consensus could be reached. + +This PEP has a well defined behaviour (the default value of the new +*cloexec* parameter is not configurable), is more conservative (no +backward compatibility issue), and is much simpler. + + +Add blocking parameter for file descriptors and use Windows overlapped I/O +-------------------------------------------------------------------------- + +Windows supports non-blocking operations on files using an extension of +the Windows API called "Overlapped I/O". 
Using this extension requires +to modify the Python standard library and applications to pass a +``OVERLAPPED`` structure and an event loop to wait for the completion of +operations. + +This PEP only tries to expose portable flags on file descriptors and +sockets. Supporting overlapped I/O requires an abstraction providing a +high-level and portable API for asynchronous operations on files and +sockets. Overlapped I/O are out of the scope of this PEP. + +UNIX supports non-blocking files, moreover recent versions of operating +systems support setting the non-blocking flag at the creation of a file +descriptor. It would be possible to add a new optional *blocking* +parameter to Python functions creating file descriptors. On Windows, +creating a file descriptor with ``blocking=False`` would raise a +``NotImplementedError``. This behaviour is not acceptable for the ``os`` +module which is designed as a thin wrapper on the C functions of the +operating system. If a platform does not support a function, the +function should not be available on the platform. For example, +the ``os.fork()`` function is not available on Windows. + +UNIX has more flag on file descriptors: ``O_DSYNC``, ``O_SYNC``, +``O_DIRECT``, etc. Adding all these flags complicates the signature and +the implementation of functions creating file descriptor like open(). +Moreover, these flags do not work on any file type, and are not +portable. + +For all these reasons, this alternative was rejected. The PEP 3156 +proposes an abstraction for asynchronous I/O supporting non-blocking +files on Windows. + + +Links +===== + +Python issues: + +* `#10115: Support accept4() for atomic setting of flags at socket + creation `_ +* `#12105: open() does not able to set flags, such as O_CLOEXEC + `_ +* `#12107: TCP listening sockets created without FD_CLOEXEC flag + `_ +* `#16850: Add "e" mode to open(): close-and-exec + (O_CLOEXEC) / O_NOINHERIT `_ +* `#16860: Use O_CLOEXEC in the tempfile module + `_ +* `#16946: subprocess: _close_open_fd_range_safe() does not set + close-on-exec flag on Linux < 2.6.23 if O_CLOEXEC is defined + `_ +* `#17070: Use the new cloexec to improve security and avoid bugs + `_ + +Other links: + +* `Secure File Descriptor Handling + `_ (Ulrich Drepper, + 2008) +* `Ghosts of Unix past, part 2: Conflated designs + `_ (Neil Brown, 2010) explains the + history of ``O_CLOEXEC`` and ``O_NONBLOCK`` flags + + +Copyright +========= + +This document has been placed into the public domain. + diff --git a/pep-0447.txt b/pep-0447.txt new file mode 100644 --- /dev/null +++ b/pep-0447.txt @@ -0,0 +1,408 @@ +PEP: 447 +Title: Add __locallookup__ method to metaclass +Version: $Revision$ +Last-Modified: $Date$ +Author: Ronald Oussoren +Status: Draft +Type: Standards Track +Content-Type: text/x-rst +Created: 12-Jun-2013 +Post-History: 2-Jul-2013, 15-Jul-2013, 29-Jul-2013 + + +Abstract +======== + +Currently ``object.__getattribute__`` and ``super.__getattribute__`` peek +in the ``__dict__`` of classes on the MRO for a class when looking for +an attribute. This PEP adds an optional ``__locallookup__`` method to +a metaclass that can be used to override this behavior. + +Rationale +========= + +It is currently not possible to influence how the `super class`_ looks +up attributes (that is, ``super.__getattribute__`` unconditionally +peeks in the class ``__dict__``), and that can be problematic for +dynamic classes that can grow new methods on demand. 
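
[Editor's note] A small, self-contained illustration of the problem described
above; ``DynamicBase`` and ``Sub`` are invented names, and a plain
``__getattr__`` stands in for a real dynamic proxy runtime::

    class DynamicBase:
        # Pretend the method lives in some external runtime and is only
        # materialized on demand when normal lookup fails.
        def __getattr__(self, name):
            if name == "greet":
                return lambda: "dynamically provided greeting"
            raise AttributeError(name)

    class Sub(DynamicBase):
        def demo(self):
            found = self.greet()          # normal lookup: works
            try:
                super().greet()           # super() only peeks in the class
            except AttributeError:        # __dict__ along the MRO: fails
                return found, "not visible through super()"

    print(Sub().demo())
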
+ +The ``__locallookup__`` method makes it possible to dynamicly add +attributes even when looking them up using the `super class`_. + +The new method affects ``object.__getattribute__`` (and +`PyObject_GenericGetAttr`_) as well for consistency. + +Background +---------- + +The current behavior of ``super.__getattribute__`` causes problems for +classes that are dynamic proxies for other (non-Python) classes or types, +an example of which is `PyObjC`_. PyObjC creates a Python class for every +class in the Objective-C runtime, and looks up methods in the Objective-C +runtime when they are used. This works fine for normal access, but doesn't +work for access with ``super`` objects. Because of this PyObjC currently +includes a custom ``super`` that must be used with its classes. + +The API in this PEP makes it possible to remove the custom ``super`` and +simplifies the implementation because the custom lookup behavior can be +added in a central location. + + +The superclass attribute lookup hook +==================================== + +Both ``super.__getattribute__`` and ``object.__getattribute__`` (or +`PyObject_GenericGetAttr`_ in C code) walk an object's MRO and peek in the +class' ``__dict__`` to look up attributes. A way to affect this lookup is +using a method on the meta class for the type, that by default looks up +the name in the class ``__dict__``. + +In Python code +-------------- + +A meta type can define a method ``__locallookup__`` that is called during +attribute resolution by both ``super.__getattribute__`` and ``object.__getattribute``:: + + class MetaType(type): + def __locallookup__(cls, name): + try: + return cls.__dict__[name] + except KeyError: + raise AttributeError(name) from None + +The ``__locallookup__`` method has as its arguments a class and the name of the attribute +that is looked up. It should return the value of the attribute without invoking descriptors, +or raise `AttributeError`_ when the name cannot be found. + +The `type`_ class provides a default implementation for ``__locallookup__``, that +looks up the name in the class dictionary. + +Example usage +............. + +The code below implements a silly metaclass that redirects attribute lookup to uppercase +versions of names:: + + class UpperCaseAccess (type): + def __locallookup__(cls, name): + return cls.__dict__[name.upper()] + + class SillyObject (metaclass=UpperCaseAccess): + def m(self): + return 42 + + def M(self): + return "fourtytwo" + + obj = SillyObject() + assert obj.m() == "fortytwo" + + +In C code +--------- + +A new slot ``tp_locallookup`` is added to the ``PyTypeObject`` struct, this slot +corresponds to the ``__locallookup__`` method on `type`_. + +The slot has the following prototype:: + + PyObject* (*locallookupfunc)(PyTypeObject* cls, PyObject* name); + +This method should lookup *name* in the namespace of *cls*, without looking at superclasses, +and should not invoke descriptors. The method returns ``NULL`` without setting an exception +when the *name* cannot be found, and returns a new reference otherwise (not a borrowed reference). + +Use of this hook by the interpreter +----------------------------------- + +The new method is required for metatypes and as such is defined on `type_`. Both +``super.__getattribute__`` and ``object.__getattribute__``/`PyObject_GenericGetAttr`_ +(through ``_PyType_Lookup``) use the this ``__locallookup__`` method when walking +the MRO. 
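
[Editor's note] As a rough illustration of the lookup rule described above
(not the actual C implementation), the MRO walk could be approximated in
pure Python as follows; ``Namespace``, ``_lookup`` and ``Example`` are
invented names, and the fallback branch mimics today's behaviour for
metaclasses that do not define the hook::

    class Namespace(type):
        # Minimal metaclass defining the proposed hook.
        def __locallookup__(cls, name):
            try:
                return cls.__dict__[name]
            except KeyError:
                raise AttributeError(name) from None

    def _lookup(cls, name):
        # Sketch of the proposed _PyType_Lookup behaviour: ask the
        # metaclass hook for every class on the MRO instead of peeking
        # directly in its __dict__.
        for klass in cls.__mro__:
            locallookup = getattr(type(klass), '__locallookup__', None)
            if locallookup is None:
                # Metaclass without the hook: fall back to the current rule.
                if name in klass.__dict__:
                    return klass.__dict__[name]
                continue
            try:
                return locallookup(klass, name)
            except AttributeError:
                continue
        raise AttributeError(name)

    class Example(metaclass=Namespace):
        def method(self):
            return 42

    assert _lookup(Example, 'method')(Example()) == 42
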
+ +Other changes to the implementation +----------------------------------- + +The change for `PyObject_GenericGetAttr`_ will be done by changing the private function +``_PyType_Lookup``. This currently returns a borrowed reference, but must return a new +reference when the ``__locallookup__`` method is present. Because of this ``_PyType_Lookup`` +will be renamed to ``_PyType_LookupName``, this will cause compile-time errors for all out-of-tree +users of this private API. + +The attribute lookup cache in ``Objects/typeobject.c`` is disabled for classes that have a +metaclass that overrides ``__locallookup__``, because using the cache might not be valid +for such classes. + +Performance impact +------------------ + +The pybench output below compares an implementation of this PEP with the regular +source tree, both based on changeset a5681f50bae2, run on an idle machine an +Core i7 processor running Centos 6.4. + +Even though the machine was idle there were clear differences between runs, +I've seen difference in "minimum time" vary from -0.1% to +1.5%, with simular +(but slightly smaller) differences in the "average time" difference. + +:: + + ------------------------------------------------------------------------------- + PYBENCH 2.1 + ------------------------------------------------------------------------------- + * using CPython 3.4.0a0 (default, Jul 29 2013, 13:01:34) [GCC 4.4.7 20120313 (Red Hat 4.4.7-3)] + * disabled garbage collection + * system check interval set to maximum: 2147483647 + * using timer: time.perf_counter + * timer: resolution=1e-09, implementation=clock_gettime(CLOCK_MONOTONIC) + + ------------------------------------------------------------------------------- + Benchmark: pep447.pybench + ------------------------------------------------------------------------------- + + Rounds: 10 + Warp: 10 + Timer: time.perf_counter + + Machine Details: + Platform ID: Linux-2.6.32-358.114.1.openstack.el6.x86_64-x86_64-with-centos-6.4-Final + Processor: x86_64 + + Python: + Implementation: CPython + Executable: /tmp/default-pep447/bin/python3 + Version: 3.4.0a0 + Compiler: GCC 4.4.7 20120313 (Red Hat 4.4.7-3) + Bits: 64bit + Build: Jul 29 2013 14:09:12 (#default) + Unicode: UCS4 + + + ------------------------------------------------------------------------------- + Comparing with: default.pybench + ------------------------------------------------------------------------------- + + Rounds: 10 + Warp: 10 + Timer: time.perf_counter + + Machine Details: + Platform ID: Linux-2.6.32-358.114.1.openstack.el6.x86_64-x86_64-with-centos-6.4-Final + Processor: x86_64 + + Python: + Implementation: CPython + Executable: /tmp/default/bin/python3 + Version: 3.4.0a0 + Compiler: GCC 4.4.7 20120313 (Red Hat 4.4.7-3) + Bits: 64bit + Build: Jul 29 2013 13:01:34 (#default) + Unicode: UCS4 + + + Test minimum run-time average run-time + this other diff this other diff + ------------------------------------------------------------------------------- + BuiltinFunctionCalls: 45ms 44ms +1.3% 45ms 44ms +1.3% + BuiltinMethodLookup: 26ms 27ms -2.4% 27ms 27ms -2.2% + CompareFloats: 33ms 34ms -0.7% 33ms 34ms -1.1% + CompareFloatsIntegers: 66ms 67ms -0.9% 66ms 67ms -0.8% + CompareIntegers: 51ms 50ms +0.9% 51ms 50ms +0.8% + CompareInternedStrings: 34ms 33ms +0.4% 34ms 34ms -0.4% + CompareLongs: 29ms 29ms -0.1% 29ms 29ms -0.0% + CompareStrings: 43ms 44ms -1.8% 44ms 44ms -1.8% + ComplexPythonFunctionCalls: 44ms 42ms +3.9% 44ms 42ms +4.1% + ConcatStrings: 33ms 33ms -0.4% 33ms 33ms -1.0% + CreateInstances: 47ms 48ms 
-2.9% 47ms 49ms -3.4% + CreateNewInstances: 35ms 36ms -2.5% 36ms 36ms -2.5% + CreateStringsWithConcat: 69ms 70ms -0.7% 69ms 70ms -0.9% + DictCreation: 52ms 50ms +3.1% 52ms 50ms +3.0% + DictWithFloatKeys: 40ms 44ms -10.1% 43ms 45ms -5.8% + DictWithIntegerKeys: 32ms 36ms -11.2% 35ms 37ms -4.6% + DictWithStringKeys: 29ms 34ms -15.7% 35ms 40ms -11.0% + ForLoops: 30ms 29ms +2.2% 30ms 29ms +2.2% + IfThenElse: 38ms 41ms -6.7% 38ms 41ms -6.9% + ListSlicing: 36ms 36ms -0.7% 36ms 37ms -1.3% + NestedForLoops: 43ms 45ms -3.1% 43ms 45ms -3.2% + NestedListComprehensions: 39ms 40ms -1.7% 39ms 40ms -2.1% + NormalClassAttribute: 86ms 82ms +5.1% 86ms 82ms +5.0% + NormalInstanceAttribute: 42ms 42ms +0.3% 42ms 42ms +0.0% + PythonFunctionCalls: 39ms 38ms +3.5% 39ms 38ms +2.8% + PythonMethodCalls: 51ms 49ms +3.0% 51ms 50ms +2.8% + Recursion: 67ms 68ms -1.4% 67ms 68ms -1.4% + SecondImport: 41ms 36ms +12.5% 41ms 36ms +12.6% + SecondPackageImport: 45ms 40ms +13.1% 45ms 40ms +13.2% + SecondSubmoduleImport: 92ms 95ms -2.4% 95ms 98ms -3.6% + SimpleComplexArithmetic: 28ms 28ms -0.1% 28ms 28ms -0.2% + SimpleDictManipulation: 57ms 57ms -1.0% 57ms 58ms -1.0% + SimpleFloatArithmetic: 29ms 28ms +4.7% 29ms 28ms +4.9% + SimpleIntFloatArithmetic: 37ms 41ms -8.5% 37ms 41ms -8.7% + SimpleIntegerArithmetic: 37ms 41ms -9.4% 37ms 42ms -10.2% + SimpleListComprehensions: 33ms 33ms -1.9% 33ms 34ms -2.9% + SimpleListManipulation: 28ms 30ms -4.3% 29ms 30ms -4.1% + SimpleLongArithmetic: 26ms 26ms +0.5% 26ms 26ms +0.5% + SmallLists: 40ms 40ms +0.1% 40ms 40ms +0.1% + SmallTuples: 46ms 47ms -2.4% 46ms 48ms -3.0% + SpecialClassAttribute: 126ms 120ms +4.7% 126ms 121ms +4.4% + SpecialInstanceAttribute: 42ms 42ms +0.6% 42ms 42ms +0.8% + StringMappings: 94ms 91ms +3.9% 94ms 91ms +3.8% + StringPredicates: 48ms 49ms -1.7% 48ms 49ms -2.1% + StringSlicing: 45ms 45ms +1.4% 46ms 45ms +1.5% + TryExcept: 23ms 22ms +4.9% 23ms 22ms +4.8% + TryFinally: 32ms 32ms -0.1% 32ms 32ms +0.1% + TryRaiseExcept: 17ms 17ms +0.9% 17ms 17ms +0.5% + TupleSlicing: 49ms 48ms +1.1% 49ms 49ms +1.0% + WithFinally: 48ms 47ms +2.3% 48ms 47ms +2.4% + WithRaiseExcept: 45ms 44ms +0.8% 45ms 45ms +0.5% + ------------------------------------------------------------------------------- + Totals: 2284ms 2287ms -0.1% 2306ms 2308ms -0.1% + + (this=pep447.pybench, other=default.pybench) + + +A run of the benchmark suite (with option "-b 2n3") also seems to indicate that +the performance impact is minimal:: + + Report on Linux fangorn.local 2.6.32-358.114.1.openstack.el6.x86_64 #1 SMP Wed Jul 3 02:11:25 EDT 2013 x86_64 x86_64 + Total CPU cores: 8 + + ### call_method_slots ### + Min: 0.304120 -> 0.282791: 1.08x faster + Avg: 0.304394 -> 0.282906: 1.08x faster + Significant (t=2329.92) + Stddev: 0.00016 -> 0.00004: 4.1814x smaller + + ### call_simple ### + Min: 0.249268 -> 0.221175: 1.13x faster + Avg: 0.249789 -> 0.221387: 1.13x faster + Significant (t=2770.11) + Stddev: 0.00012 -> 0.00013: 1.1101x larger + + ### django_v2 ### + Min: 0.632590 -> 0.601519: 1.05x faster + Avg: 0.635085 -> 0.602653: 1.05x faster + Significant (t=321.32) + Stddev: 0.00087 -> 0.00051: 1.6933x smaller + + ### fannkuch ### + Min: 1.033181 -> 0.999779: 1.03x faster + Avg: 1.036457 -> 1.001840: 1.03x faster + Significant (t=260.31) + Stddev: 0.00113 -> 0.00070: 1.6112x smaller + + ### go ### + Min: 0.526714 -> 0.544428: 1.03x slower + Avg: 0.529649 -> 0.547626: 1.03x slower + Significant (t=-93.32) + Stddev: 0.00136 -> 0.00136: 1.0028x smaller + + ### iterative_count ### + Min: 0.109748 -> 0.116513: 1.06x slower + 
Avg: 0.109816 -> 0.117202: 1.07x slower + Significant (t=-357.08) + Stddev: 0.00008 -> 0.00019: 2.3664x larger + + ### json_dump_v2 ### + Min: 2.554462 -> 2.609141: 1.02x slower + Avg: 2.564472 -> 2.620013: 1.02x slower + Significant (t=-76.93) + Stddev: 0.00538 -> 0.00481: 1.1194x smaller + + ### meteor_contest ### + Min: 0.196336 -> 0.191925: 1.02x faster + Avg: 0.196878 -> 0.192698: 1.02x faster + Significant (t=61.86) + Stddev: 0.00053 -> 0.00041: 1.2925x smaller + + ### nbody ### + Min: 0.228039 -> 0.235551: 1.03x slower + Avg: 0.228857 -> 0.236052: 1.03x slower + Significant (t=-54.15) + Stddev: 0.00130 -> 0.00029: 4.4810x smaller + + ### pathlib ### + Min: 0.108501 -> 0.105339: 1.03x faster + Avg: 0.109084 -> 0.105619: 1.03x faster + Significant (t=311.08) + Stddev: 0.00022 -> 0.00011: 1.9314x smaller + + ### regex_effbot ### + Min: 0.057905 -> 0.056447: 1.03x faster + Avg: 0.058055 -> 0.056760: 1.02x faster + Significant (t=79.22) + Stddev: 0.00006 -> 0.00015: 2.7741x larger + + ### silent_logging ### + Min: 0.070810 -> 0.072436: 1.02x slower + Avg: 0.070899 -> 0.072609: 1.02x slower + Significant (t=-191.59) + Stddev: 0.00004 -> 0.00008: 2.2640x larger + + ### spectral_norm ### + Min: 0.290255 -> 0.299286: 1.03x slower + Avg: 0.290335 -> 0.299541: 1.03x slower + Significant (t=-572.10) + Stddev: 0.00005 -> 0.00015: 2.8547x larger + + ### threaded_count ### + Min: 0.107215 -> 0.115206: 1.07x slower + Avg: 0.107488 -> 0.115996: 1.08x slower + Significant (t=-109.39) + Stddev: 0.00016 -> 0.00076: 4.8665x larger + + The following not significant results are hidden, use -v to show them: + call_method, call_method_unknown, chaos, fastpickle, fastunpickle, float, formatted_logging, hexiom2, json_load, normal_startup, nqueens, pidigits, raytrace, regex_compile, regex_v8, richards, simple_logging, startup_nosite, telco, unpack_sequence. + + +Alternative proposals +--------------------- + +``__getattribute_super__`` +.......................... + +An earlier version of this PEP used the following static method on classes:: + + def __getattribute_super__(cls, name, object, owner): pass + +This method performed name lookup as well as invoking descriptors and was necessarily +limited to working only with ``super.__getattribute__``. + + +Reuse ``tp_getattro`` +..................... + +It would be nice to avoid adding a new slot, thus keeping the API simpler and +easier to understand. A comment on `Issue 18181`_ asked about reusing the +``tp_getattro`` slot, that is super could call the ``tp_getattro`` slot of all +methods along the MRO. + +That won't work because ``tp_getattro`` will look in the instance +``__dict__`` before it tries to resolve attributes using classes in the MRO. +This would mean that using ``tp_getattro`` instead of peeking the class +dictionaries changes the semantics of the `super class`_. + + +References +========== + +* `Issue 18181`_ contains a prototype implementation + +Copyright +========= + +This document has been placed in the public domain. + +.. _`Issue 18181`: http://bugs.python.org/issue18181 + +.. _`super class`: http://docs.python.org/3/library/functions.html#super + +.. _`NotImplemented`: http://docs.python.org/3/library/constants.html#NotImplemented + +.. _`PyObject_GenericGetAttr`: http://docs.python.org/3/c-api/object.html#PyObject_GenericGetAttr + +.. _`type`: http://docs.python.org/3/library/functions.html#type + +.. _`AttributeError`: http://docs.python.org/3/library/exceptions.html#AttributeError + +.. _`PyObjC`: http://pyobjc.sourceforge.net/ + +.. 
_`classmethod`: http://docs.python.org/3/library/functions.html#classmethod diff --git a/pep-0448.txt b/pep-0448.txt new file mode 100644 --- /dev/null +++ b/pep-0448.txt @@ -0,0 +1,247 @@ +PEP: 448 +Title: Additional Unpacking Generalizations +Version: $Revision$ +Last-Modified: $Date$ +Author: Joshua Landau +Discussions-To: python-ideas at python.org +Status: Draft +Type: Standards Track +Content-Type: text/x-rst +Created: 29-Jun-2013 +Python-Version: 3.4 +Post-History: + + +Abstract +======== + +This PEP proposes extended usages of the ``*`` iterable unpacking +operator to allow unpacking in more positions, an arbitrary number of +times, and in several additional circumstances. + +Specifically: + +Arbitrarily positioned unpacking operators:: + + >>> print(*[1], *[2], 3) + 1 2 3 + >>> dict(**{'x': 1}, y=3, **{'z': 2}) + {'x': 1, 'y': 2, 'z': 3} + +Function calls currently have the restriction that keyword arguments +must follow positional arguments and ``**`` unpackings must additionally +follow ``*`` unpackings. Because of the new levity for ``*`` and ``**`` +unpackings, it may be advisable to lift some or all of these +restrictions. + +As currently, if an argument is given multiple times - such as a +positional argument given both positionally and by keyword - a +TypeError is raised. + +Unpacking is proposed to be allowed inside tuples, lists, sets, +dictionaries and comprehensions:: + + >>> *range(4), 4 + (0, 1, 2, 3, 4) + >>> [*range(4), 4] + [0, 1, 2, 3, 4] + >>> {*range(4), 4} + {0, 1, 2, 3, 4} + >>> {'x': 1, **{'y': 2}} + {'x': 1, 'y': 2} + + >>> ranges = [range(i) for i in range(5)] + >>> [*item for item in ranges] + [0, 0, 1, 0, 1, 2, 0, 1, 2, 3] + + +Rationale +========= + +Current usage of the ``*`` iterable unpacking operator features +unnecessary restrictions that can harm readability. + +Unpacking multiple times has an obvious rationale. When you want to +unpack several iterables into a function definition or follow an unpack +with more positional arguments, the most natural way would be to write:: + + function(**kw_arguments, **more_arguments) + + function(*arguments, argument) + +Simple examples where this is useful are ``print`` and ``str.format``. +Instead, you could be forced to write:: + + kwargs = dict(kw_arguments) + kwargs.update(more_arguments) + function(**kwargs) + + args = list(arguments) + args.append(arg) + function(*args) + +or, if you know to do so:: + + from collections import ChainMap + function(**ChainMap(more_arguments, arguments)) + + from itertools import chain + function(*chain(args, [arg])) + +which add unnecessary line-noise and, with the first methods, causes +duplication of work. + + +There are two primary rationales for unpacking inside of containers. +Firstly there is a symmetry of assignment, where ``fst, *other, lst = +elems`` and ``elems = fst, *other, lst`` are approximate inverses, +ignoring the specifics of types. This, in effect, simplifies the +language by removing special cases. + +Secondly, it vastly simplifies types of "addition" such as combining +dictionaries, and does so in an unambiguous and well-defined way:: + + combination = {**first_dictionary, "x": 1, "y": 2} + +instead of:: + + combination = first_dictionary.copy() + combination.update({"x": 1, "y": 2}) + +which is especially important in contexts where expressions are +preferred. 
This is also useful as a more readable way of summing +iterables into a list, such as ``my_list + list(my_tuple) + +list(my_range)`` which is now equivalent to just ``[*my_list, +*my_tuple, *my_range]``. + + +The addition of unpacking to comprehensions is a logical extension. +It's usage will primarily be a neat replacement for ``[i for j in +2D_list for i in j]``, as the more readable ``[*l for l in 2D_list]``. +Other uses are possible, but expected to occur rarely. + + +Specification +============= + +Function calls may accept an unbound number of ``*`` and ``**`` +unpackings. There will be no restriction of the order of positional +arguments with relation to ``*`` unpackings nor any restriction of the +order of keyword arguments with relation to ``**`` unpackings. + +Function calls currently have the restriction that keyword arguments +must follow positional arguments and ``**`` unpackings must additionally +follow ``*`` unpackings. Because of the new levity for ``*`` and ``**`` +unpackings, it may be advisable to list some or all of these +restrictions. + +As currently, if an argument is given multiple times - such as a +positional argument given both positionally and by keyword - a +TypeError is raised. + +If the restrictions are kept, a function call will look like this:: + + function( + argument or *args, argument or *args, ..., + kwargument or *args, kwargument or *args, ..., + kwargument or **kwargs, kwargument or **kwargs, ... + ) + +If they are removed completely, a function call will look like this:: + + function( + argument or keyword_argument or *args or **kwargs, + argument or keyword_argument or *args or **kwargs, + ... + ) + + +Tuples, lists, sets and dictionaries will allow unpacking. This will +act as if the elements from unpacked items were inserted in order at +the site of unpacking, much as happens in unpacking in a function-call. +Dictionaries require ``**`` unpacking; all the others require ``*`` unpacking. +A dictionary's key remain in a right-to-left priority order, so +``{**{'a': 1}, 'a': 2, **{'a': 3}}`` evaluates to ``{'a': 3}``. There +is no restriction on the number or position of unpackings. + +Comprehensions, by simple extension, will support unpacking. As before, +dictionaries require ``**`` unpacking, all the others require ``*`` +unpacking and key priorities are unchanged. + +Examples include:: + + {*[1, 2, 3], 4, 5, *{6, 7, 8}} + + (*e for e in [[1], [3, 4, 5], [2]]) + + {**dictionary for dictionary in (globals(), locals())} + + {**locals(), "override": None} + + +Disadvantages +============= + +If the current restrictions for function call arguments (keyword +arguments must follow positional arguments and ``**`` unpackings must +additionally follow ``*`` unpackings) are kept, the allowable orders +for arguments in a function call is more complicated than before. +The simplest explanation for the rules may be "positional arguments +come first and keyword arguments follow, but ``*`` unpackings are +allowed after keyword arguments". + +If the current restrictions are lifted, there are no obvious gains to +code as the only new orders that are allowed look silly: ``f(a, e=e, +d=d, b, c)`` being a simpler example. + + +Whilst ``*elements, = iterable`` causes ``elements`` to be a list, +``elements = *iterable,`` causes ``elements`` to be a tuple. The +reason for this may not be obvious at first glance and may confuse +people unfamiliar with the construct. 
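
[Editor's note] A two-line sketch of that asymmetry; the first statement is
already legal today via PEP 3132, while the second only becomes legal under
this proposal::

    *elements, = range(3)      # starred *target*: always a list
    assert elements == [0, 1, 2]

    elements = *range(3),      # starred expression in a tuple display
    assert elements == (0, 1, 2)
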
+ + +Implementation +============== + +An implementation for an old version of Python 3 is found at Issue +2292 on bug tracker [1]_, although several changes should be made: + +- It has yet to be updated to the most recent Python version + +- It features a now redundant replacement for "yield from" which + should be removed + +- It also loses support for calling function with keyword arguments before + positional arguments, which is an unnecessary backwards-incompatible change + +- If the restrictions on the order of arguments in a function call are + partially or fully lifted, they would need to be included + + +References +========== + +.. [1] Issue 2292, "Missing `*`-unpacking generalizations", Thomas Wouters + (http://bugs.python.org/issue2292) + +.. [2] Discussion on Python-ideas list, + "list / array comprehensions extension", Alexander Heger + (http://mail.python.org/pipermail/python-ideas/2011-December/013097.html) + + +Copyright +========= + +This document has been placed in the public domain. + + + +.. + Local Variables: + mode: indented-text + indent-tabs-mode: nil + sentence-end-double-space: t + fill-column: 70 + coding: utf-8 + End: diff --git a/pep-0466/test_cloexec.py b/pep-0466/test_cloexec.py new file mode 100644 --- /dev/null +++ b/pep-0466/test_cloexec.py @@ -0,0 +1,50 @@ +import os, fcntl, sys, errno + +def get_cloexec(fd): + try: + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + return bool(flags & fcntl.FD_CLOEXEC) + except IOError as err: + if err.errno == errno.EBADF: + return '' + else: + return str(err) + +def set_cloexec(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + flags |= fcntl.FD_CLOEXEC + fcntl.fcntl(fd, fcntl.F_SETFD, flags) + +def main(): + f = open(__file__, "rb") + fd = f.fileno() + print("initial state: fd=%s, cloexec=%s" % (fd, get_cloexec(fd))) + + + pid = os.fork() + if not pid: + set_cloexec(fd) + print("child process after fork, set cloexec: cloexec=%s" % get_cloexec(fd)) + child_argv = [sys.executable, __file__, str(fd), + 'child process after exec'] + os.execv(child_argv[0], child_argv) + + os.waitpid(pid, 0) + print("parent process after fork: cloexec=%s" % get_cloexec(fd)) + child_argv = [sys.executable, __file__, str(fd), + 'parent process after exec'] + os.execv(child_argv[0], child_argv) + +def after_exec(): + fd = int(sys.argv[1]) + name = sys.argv[2] + print("%s: fd=%s, cloexec=%s" + % (name, fd, get_cloexec(fd))) + sys.exit() + +if __name__ == "__main__": + if len(sys.argv) == 1: + main() + else: + after_exec() + diff --git a/pep-3150.txt b/pep-3150.txt --- a/pep-3150.txt +++ b/pep-3150.txt @@ -19,9 +19,11 @@ Python statements that do not currently have an associated code suite. This clause will create a statement local namespace for additional names that are accessible in the associated statement, but do not become part of the -containing namespace. To permit a sane implementation strategy, forward -references to names from the ``given`` clause will need to be marked -explicitly. +containing namespace. + +Adoption of a new symbol, ``?``, is proposed to denote a forward reference +to the namespace created by running the associated code suite. It will be +a reference to a ``types.SimpleNamespace`` object. The primary motivation is to enable a more declarative style of programming, where the operation to be performed is presented to the reader first, and the @@ -72,12 +74,16 @@ name in the header line, with the actual definitions following in the indented clause. 
As a simple example:: - sorted_data = sorted(data, key=.sort_key) given: + sorted_data = sorted(data, key=?.sort_key) given: def sort_key(item): return item.attr1, item.attr2 -The leading ``.`` on ``.sort_key`` indicates to the compiler that this -is a forward reference to a name defined in the ``given`` clause. +The new symbol ``?`` is used to refer to the given namespace. It would be a +``types.SimpleNamespace`` instance, so ``?.sort_key`` functions as +a forward reference to a name defined in the ``given`` clause. + +A docstring would be permitted in the given clause, and would be attached +to the result namespace as its ``__doc__`` attribute. The ``pass`` statement is included to provide a consistent way to skip inclusion of a meaningful expression in the header line. While this is not @@ -94,7 +100,7 @@ # Explicit early binding via given clause seq = [] for i in range(10): - seq.append(.f) given i=i: + seq.append(.f) given i=i in: def f(): return i assert [f() for f in seq] == list(range(10)) @@ -105,7 +111,7 @@ The following statement:: - op(.f, .g) given bound_a=a, bound_b=b: + op(?.f, ?.g) given bound_a=a, bound_b=b in: def f(): return bound_a + bound_b def g(): @@ -121,9 +127,10 @@ return bound_a + bound_b def g(): return bound_a - bound_b - return f, g - __ref1, __ref2 = __scope(__arg1) - op(__ref1, __ref2) + return types.SimpleNamespace(**locals()) + __ref = __scope(__arg1, __arg2) + __ref.__doc__ = __scope.__doc__ + op(__ref.f, __ref.g) A ``given`` clause is essentially a nested function which is created and then immediately executed. Unless explicitly passed in, names are looked @@ -158,7 +165,7 @@ yield_stmt: yield_expr [given_clause] raise_stmt: 'raise' [test ['from' test]] [given_clause] assert_stmt: 'assert' test [',' test] [given_clause] - given_clause: "given" (NAME '=' test)* ":" suite + given_clause: "given" [(NAME '=' test)+ "in"]":" suite (Note that ``expr_stmt`` in the grammar is a slight misnomer, as it covers assignment and augmented assignment in addition to simple expression @@ -207,7 +214,7 @@ flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt In addition to the above changes, the definition of ``atom`` would be changed -to also allow ``"." NAME``. The restriction of this usage to statements with +to also allow ``?``. The restriction of this usage to statements with an associated ``given`` clause would be handled by a later stage of the compilation process (likely AST construction, which already enforces other restrictions where the grammar is overly permissive in order to @@ -277,13 +284,14 @@ However, while they are the initial motivating use case, limiting this feature solely to simple assignments would be overly restrictive. Once the feature is defined at all, it would be quite arbitrary to prevent its use -for augmented assignments, return statements, yield expressions and -arbitrary expressions that may modify the application state. +for augmented assignments, return statements, yield expressions, +comprehensions and arbitrary expressions that may modify the +application state. The ``given`` clause may also function as a more readable alternative to some uses of lambda expressions and similar constructs when passing one-off functions to operations -like ``sorted()``. +like ``sorted()`` or in callback based event-driven programming. 
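
[Editor's note] Since a ``given`` clause is essentially a nested function
that is created and immediately executed, current Python can already emulate
the expansion shown above by hand. A rough, hand-written equivalent of
``op(?.f, ?.g) given bound_a=a, bound_b=b in: ...`` follows; ``_scope`` and
``_ref`` are illustrative names, where a real implementation would use hidden
names::

    import types

    def op(f, g):
        return f(), g()

    a, b = 10, 3

    def _scope(bound_a, bound_b):
        # Body of the given clause.
        def f():
            return bound_a + bound_b
        def g():
            return bound_a - bound_b
        return types.SimpleNamespace(**locals())

    _ref = _scope(a, b)        # the clause runs immediately
    assert op(_ref.f, _ref.g) == (13, 7)
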
In module and class level code, the ``given`` clause will serve as a clear and reliable replacement for usage of the ``del`` statement to keep @@ -350,7 +358,7 @@ # would be equivalent to - seq2 = .result given seq=seq: + seq2 = ?.result given seq=seq: result = [] for y in seq: if p(y): @@ -367,7 +375,7 @@ provide a precisely equivalent expansion for a generator expression. The closest it can get is to define an additional level of scoping:: - seq2 = .g(seq) given: + seq2 = ?.g(seq) given: def g(seq): for y in seq: if p(y): @@ -375,6 +383,22 @@ if q(x): yield x +This limitation could be remedied by permitting the given clause to be +a generator function, in which case ? would refer to a generator-iterator +object rather than a simple namespace:: + + seq2 = ? given seq=seq in: + for y in seq: + if p(y): + for x in y: + if q(x): + yield x + +However, this would make the meaning of "?" quite ambiguous, even more so +than is already the case for the meaning of ``def`` statements (which will +usually have a docstring indicating whether or not a function definition is +actually a generator) + Explaining Decorator Clause Evaluation and Application ------------------------------------------------------ @@ -477,14 +501,19 @@ I believe the proposal in this PEP would finally let Python get close to the "executable pseudocode" bar for the kind of thought expressed above:: - sorted_list = sorted(original, key=.sort_key) given: - def sort_key(item): + sorted_list = sorted(original, key=?.key) given: + def key(item): return item.attr1, item.attr2 -Everything is in the same order as it was in the user's original thought, the -only addition they have to make is to give the sorting criteria a name so that -the usage can be linked up to the subsequent definition. - +Everything is in the same order as it was in the user's original thought, and +they don't even need to come up with a name for the sorting criteria: it is +possible to reuse the keyword argument name directly. + +A possible enhancement to those proposal would be to provide a convenient +shorthand syntax to say "use the given clause contents as keyword +arguments". Even without dedicated syntax, that can be written simply as +``**vars(?)``. + Harmful to Introspection ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -516,7 +545,7 @@ This is more of a deficiency in the PEP rather than the idea, though. If it wasn't a real world problem, we wouldn't get so many complaints about the lack of multi-line lambda support and Ruby's block construct -probaly wouldn't be quite so popular. +probably wouldn't be quite so popular. Open Questions @@ -525,9 +554,12 @@ Syntax for Forward References ----------------------------- -The leading ``.`` arguably fails the "syntax shall not look like grit on -Uncle Tim's monitor" test. However, it does have the advantages of being -easy to type and already having an association with namespaces. +The ``?`` symbol is proposed for forward references to the given namespace +as it is short, currently unused and suggests "there's something missing +here that will be filled in later". + +The proposal in the PEP doesn't neatly parallel any existing Python feature, +so reusing an already used symbol has been deliberately avoided. Handling of ``nonlocal`` and ``global`` @@ -541,8 +573,8 @@ functions were defined as in the expansion above. 
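
[Editor's note] The practical consequence of that rule can already be seen
with today's nested functions, which is what the expansion produces:
rebinding a name from the surrounding scope inside the clause body requires
``nonlocal`` (or ``global`` for module-level names), exactly as it would in
an explicitly written helper function. A minimal sketch, with ``_scope``
standing in for the compiled clause body::

    def outer():
        tally = 0

        def _scope():
            # Plain assignment would create a new local here;
            # nonlocal is needed to rebind the outer name.
            nonlocal tally
            tally += 1

        _scope()               # the clause body runs immediately
        return tally

    assert outer() == 1
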
-Detailed Semantics #3: Handling of ``break`` and ``continue`` -------------------------------------------------------------- +Handling of ``break`` and ``continue`` +-------------------------------------- ``break`` and ``continue`` will operate as if the anonymous functions were defined as in the expansion above. They will be syntax errors if they occur @@ -561,6 +593,25 @@ Examples ======== +Defining callbacks for event driven programming:: + + # Current Python (definition before use) + def cb(sock): + # Do something with socket + def eb(exc): + logging.exception( + "Failed connecting to %s:%s", host, port) + loop.create_connection((host, port), cb, eb) given: + + # Becomes: + loop.create_connection((host, port), ?.cb, ?.eb) given: + def cb(sock): + # Do something with socket + def eb(exc): + logging.exception( + "Failed connecting to %s:%s", host, port) + + Defining "one-off" classes which typically only have a single instance:: # Current Python (instantiation after definition) @@ -579,7 +630,7 @@ ... # However many lines # Becomes: - public_name = .MeaningfulClassName(*params) given: + public_name = ?.MeaningfulClassName(*params) given: class MeaningfulClassName(): ... # Should trawl the stdlib for an example of doing this @@ -593,7 +644,7 @@ del _createenviron # Becomes: - environ = ._createenviron() given: + environ = ?._createenviron() given: def _createenviron(): ... # 27 line function @@ -606,7 +657,7 @@ return decorating_function # Becomes: - return .decorating_function given: + return ?.decorating_function given: # Cell variables rather than locals, but should give similar speedup tuple, sorted, len, KeyError = tuple, sorted, len, KeyError def decorating_function(user_function): @@ -701,6 +752,9 @@ .. [9] Possible PEP 3150 style guidelines (#2): http://mail.python.org/pipermail/python-ideas/2011-October/012341.html +.. [10] Multi-line lambdas (again!) + http://mail.python.org/pipermail/python-ideas/2013-August/022526.html + Copyright ========= diff --git a/pep-3156.txt b/pep-3156.txt --- a/pep-3156.txt +++ b/pep-3156.txt @@ -846,6 +846,12 @@ convention from the section "Callback Style" below) is always called with a single argument, the Future object. +- ``remove_done_callback(fn)``. Remove the argument from the list of + callbacks. This method is not defined by PEP 3148. The argument + must be equal (using ``==``) to the argument passed to + ``add_done_callback()``. Returns the number of times the callback + was removed. + - ``set_result(result)``. The Future must not be done (nor cancelled) already. This makes the Future done and schedules the callbacks. Difference with PEP 3148: This is a public API. @@ -1302,25 +1308,25 @@ - ``FIRST_EXCEPTION``: Wait until at least one Future is done (not cancelled) with an exception set. (The exclusion of cancelled - Futures from the filter is surprising, but PEP 3148 does it this - way.) + Futures from the condition is surprising, but PEP 3148 does it + this way.) - ``tulip.as_completed(fs, timeout=None)``. Returns an iterator whose - values are Futures; waiting for successive values waits until the - next Future or coroutine from the set ``fs`` completes, and returns - its result (or raises its exception). The optional argument - ``timeout`` has the same meaning and default as it does for - ``concurrent.futures.wait()``: when the timeout occurs, the next - Future returned by the iterator will raise ``TimeoutError`` when - waited for. 
Example of use:: + values are Futures or coroutines; waiting for successive values + waits until the next Future or coroutine from the set ``fs`` + completes, and returns its result (or raises its exception). The + optional argument ``timeout`` has the same meaning and default as it + does for ``concurrent.futures.wait()``: when the timeout occurs, the + next Future returned by the iterator will raise ``TimeoutError`` + when waited for. Example of use:: for f in as_completed(fs): result = yield from f # May raise an exception. # Use result. - Note: if you do not wait for the futures as they are produced by the - iterator, your ``for`` loop may not make progress (since you are not - allowing other tasks to run). + Note: if you do not wait for the values produced by the iterator, + your ``for`` loop may not make progress (since you are not allowing + other tasks to run). Sleeping -------- diff --git a/pep2html.py b/pep2html.py --- a/pep2html.py +++ b/pep2html.py @@ -202,7 +202,7 @@ print >> outfile, '' print >> outfile, '
\n' for k, v in header: - if k.lower() in ('author', 'discussions-to'): + if k.lower() in ('author', 'bdfl-delegate', 'discussions-to'): mailtos = [] for part in re.split(',\s*', v): if '@' in part: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sun Aug 4 12:23:13 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sun, 4 Aug 2013 12:23:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NjQ3?= =?utf-8?q?=3A_Temporary_disable_the_=22nothing_to_repeat=22_check_to_make?= =?utf-8?q?_buildbots?= Message-ID: <3c7J8x6V46z7LjN@mail.python.org> http://hg.python.org/cpython/rev/e2ba4592ce3a changeset: 85017:e2ba4592ce3a branch: 2.7 parent: 85000:ee0bdc007a0f user: Serhiy Storchaka date: Sun Aug 04 13:22:30 2013 +0300 summary: Issue #18647: Temporary disable the "nothing to repeat" check to make buildbots happy. files: Lib/sre_compile.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py --- a/Lib/sre_compile.py +++ b/Lib/sre_compile.py @@ -355,8 +355,8 @@ def _simple(av): # check if av is a "simple" operator lo, hi = av[2].getwidth() - if lo == 0 and hi == MAXREPEAT: - raise error, "nothing to repeat" + #if lo == 0 and hi == MAXREPEAT: + # raise error, "nothing to repeat" return lo == hi == 1 and av[2][0][0] != SUBPATTERN def _compile_info(code, pattern, flags): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 15:10:41 2013 From: python-checkins at python.org (eli.bendersky) Date: Sun, 4 Aug 2013 15:10:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzEzNjEy?= =?utf-8?q?=3A_Fix_a_buffer_overflow_in_case_of_a_multi-byte_encoding=2E?= Message-ID: <3c7Mt96hNmzSXj@mail.python.org> http://hg.python.org/cpython/rev/b3efc140d8a6 changeset: 85018:b3efc140d8a6 branch: 2.7 user: Eli Bendersky date: Sun Aug 04 06:09:49 2013 -0700 summary: Issue #13612: Fix a buffer overflow in case of a multi-byte encoding. This is a belated backport of f7b47fb30169; Patch by Serhiy Storchaka. 
files: Lib/test/test_xml_etree.py | 6 ++++++ Modules/_elementtree.c | 2 ++ Modules/pyexpat.c | 7 +++++++ 3 files changed, 15 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -883,6 +883,12 @@ >>> check_encoding("iso-8859-15") >>> check_encoding("cp437") >>> check_encoding("mac-roman") + >>> check_encoding("gbk") + Traceback (most recent call last): + ValueError: multi-byte encodings are not supported + >>> check_encoding("cp037") + Traceback (most recent call last): + ParseError: unknown encoding: line 1, column 30 """ ET.XML("" % encoding) diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -2427,6 +2427,8 @@ if (PyUnicode_GET_SIZE(u) != 256) { Py_DECREF(u); + PyErr_SetString(PyExc_ValueError, + "multi-byte encodings are not supported"); return XML_STATUS_ERROR; } diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -1252,6 +1252,13 @@ if (_u_string == NULL) return result; + if (PyUnicode_GET_SIZE(_u_string) != 256) { + Py_DECREF(_u_string); + PyErr_SetString(PyExc_ValueError, + "multi-byte encodings are not supported"); + return result; + } + for (i = 0; i < 256; i++) { /* Stupid to access directly, but fast */ Py_UNICODE c = _u_string->str[i]; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 15:40:02 2013 From: python-checkins at python.org (charles-francois.natali) Date: Sun, 4 Aug 2013 15:40:02 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3Njg0?= =?utf-8?q?=3A_Fix_some_test=5Fsocket_failures_due_to_limited_FD_passing_s?= =?utf-8?q?upport?= Message-ID: <3c7NX26VTkz7Ljj@mail.python.org> http://hg.python.org/cpython/rev/b7d764807343 changeset: 85019:b7d764807343 branch: 3.3 parent: 85007:a5a5ba4f71ad user: Charles-Francois Natali date: Mon May 20 19:08:19 2013 +0200 summary: Issue #17684: Fix some test_socket failures due to limited FD passing support on OS-X. Patch by Jeff Ramnani. files: Lib/test/test_socket.py | 10 ++++------ 1 files changed, 4 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -2533,8 +2533,7 @@ def _testFDPassCMSG_LEN(self): self.createAndSendFDs(1) - # Issue #12958: The following test has problems on Mac OS X - @support.anticipate_failure(sys.platform == "darwin") + @unittest.skipIf(sys.platform == "darwin", "skipping, see issue #12958") @requireAttrs(socket, "CMSG_SPACE") def testFDPassSeparate(self): # Pass two FDs in two separate arrays. 
Arrays may be combined @@ -2544,7 +2543,7 @@ maxcmsgs=2) @testFDPassSeparate.client_skip - @support.anticipate_failure(sys.platform == "darwin") + @unittest.skipIf(sys.platform == "darwin", "skipping, see issue #12958") def _testFDPassSeparate(self): fd0, fd1 = self.newFDs(2) self.assertEqual( @@ -2556,8 +2555,7 @@ array.array("i", [fd1]))]), len(MSG)) - # Issue #12958: The following test has problems on Mac OS X - @support.anticipate_failure(sys.platform == "darwin") + @unittest.skipIf(sys.platform == "darwin", "skipping, see issue #12958") @requireAttrs(socket, "CMSG_SPACE") def testFDPassSeparateMinSpace(self): # Pass two FDs in two separate arrays, receiving them into the @@ -2569,7 +2567,7 @@ maxcmsgs=2, ignoreflags=socket.MSG_CTRUNC) @testFDPassSeparateMinSpace.client_skip - @support.anticipate_failure(sys.platform == "darwin") + @unittest.skipIf(sys.platform == "darwin", "skipping, see issue #12958") def _testFDPassSeparateMinSpace(self): fd0, fd1 = self.newFDs(2) self.assertEqual( -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 15:40:04 2013 From: python-checkins at python.org (charles-francois.natali) Date: Sun, 4 Aug 2013 15:40:04 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Null_merge=2E?= Message-ID: <3c7NX41MQSz7Ljk@mail.python.org> http://hg.python.org/cpython/rev/928d405fe2ad changeset: 85020:928d405fe2ad parent: 85016:c3e6df110737 parent: 85019:b7d764807343 user: Charles-Fran?ois Natali date: Sun Aug 04 15:35:57 2013 +0200 summary: Null merge. files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 17:42:43 2013 From: python-checkins at python.org (ethan.furman) Date: Sun, 4 Aug 2013 17:42:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Close_=2318635=3A_Move_cla?= =?utf-8?q?ss_level_private_attribute_from_instance_to_class=2E?= Message-ID: <3c7RFb3QRLz7LjN@mail.python.org> http://hg.python.org/cpython/rev/b034418e840b changeset: 85021:b034418e840b user: Ethan Furman date: Sun Aug 04 08:42:23 2013 -0700 summary: Close #18635: Move class level private attribute from instance to class. files: Lib/enum.py | 2 +- Lib/test/test_enum.py | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletions(-) diff --git a/Lib/enum.py b/Lib/enum.py --- a/Lib/enum.py +++ b/Lib/enum.py @@ -127,6 +127,7 @@ enum_class = super().__new__(metacls, cls, bases, classdict) enum_class._member_names_ = [] # names in definition order enum_class._member_map_ = OrderedDict() # name->value map + enum_class._member_type_ = member_type # Reverse value->name map for hashable values. 
enum_class._value2member_map_ = {} @@ -159,7 +160,6 @@ if not hasattr(enum_member, '_value_'): enum_member._value_ = member_type(*args) value = enum_member._value_ - enum_member._member_type_ = member_type enum_member._name_ = member_name enum_member.__init__(*args) # If another member with the same value was already defined, the diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -583,6 +583,24 @@ option3 = 3 self.assertEqual(int(MailManOptions.option1), 1) + def test_introspection(self): + class Number(IntEnum): + one = 100 + two = 200 + self.assertIs(Number.one._member_type_, int) + self.assertIs(Number._member_type_, int) + class String(str, Enum): + yarn = 'soft' + rope = 'rough' + wire = 'hard' + self.assertIs(String.yarn._member_type_, str) + self.assertIs(String._member_type_, str) + class Plain(Enum): + vanilla = 'white' + one = 1 + self.assertIs(Plain.vanilla._member_type_, object) + self.assertIs(Plain._member_type_, object) + def test_no_such_enum_member(self): class Color(Enum): red = 1 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 20:51:44 2013 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 4 Aug 2013 20:51:44 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Silence_compil?= =?utf-8?q?er_warning_about_an_uninitialized_variable?= Message-ID: <3c7WRh5JXmzQr9@mail.python.org> http://hg.python.org/cpython/rev/ed8d94e4862a changeset: 85022:ed8d94e4862a branch: 3.3 parent: 85019:b7d764807343 user: Raymond Hettinger date: Sun Aug 04 11:51:03 2013 -0700 summary: Silence compiler warning about an uninitialized variable files: Objects/unicodeobject.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -3323,7 +3323,7 @@ wchar_t *wstr; PyObject *bytes = NULL; char *errmsg; - PyObject *reason; + PyObject *reason = NULL; PyObject *exc; size_t error_pos; int surrogateescape; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 20:51:46 2013 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 4 Aug 2013 20:51:46 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_merge?= Message-ID: <3c7WRk0LfzzQr9@mail.python.org> http://hg.python.org/cpython/rev/f1cb19264a1f changeset: 85023:f1cb19264a1f parent: 85021:b034418e840b parent: 85022:ed8d94e4862a user: Raymond Hettinger date: Sun Aug 04 11:51:35 2013 -0700 summary: merge files: Objects/unicodeobject.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -3235,7 +3235,7 @@ wchar_t *wstr; PyObject *bytes = NULL; char *errmsg; - PyObject *reason; + PyObject *reason = NULL; PyObject *exc; size_t error_pos; int surrogateescape; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 20:52:55 2013 From: python-checkins at python.org (alexander.belopolsky) Date: Sun, 4 Aug 2013 20:52:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fixes_=238860=3A_Round_hal?= =?utf-8?q?f-microseconds_to_even_in_the_timedelta_constructor=2E?= Message-ID: <3c7WT32jJ2zSnS@mail.python.org> http://hg.python.org/cpython/rev/f7c84ef35b00 changeset: 85024:f7c84ef35b00 parent: 85021:b034418e840b user: Alexander 
Belopolsky date: Sun Aug 04 14:51:35 2013 -0400 summary: Fixes #8860: Round half-microseconds to even in the timedelta constructor. (Original patch by Mark Dickinson.) files: Doc/library/datetime.rst | 10 +++-- Lib/test/datetimetester.py | 4 ++ Misc/NEWS | 1 + Modules/_datetimemodule.c | 49 ++++++++++++++++--------- 4 files changed, 42 insertions(+), 22 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -170,10 +170,12 @@ * ``0 <= seconds < 3600*24`` (the number of seconds in one day) * ``-999999999 <= days <= 999999999`` - If any argument is a float and there are fractional microseconds, the fractional - microseconds left over from all arguments are combined and their sum is rounded - to the nearest microsecond. If no argument is a float, the conversion and - normalization processes are exact (no information is lost). + If any argument is a float and there are fractional microseconds, + the fractional microseconds left over from all arguments are + combined and their sum is rounded to the nearest microsecond using + round-half-to-even tiebreaker. If no argument is a float, the + conversion and normalization processes are exact (no information is + lost). If the normalized value of days lies outside the indicated range, :exc:`OverflowError` is raised. diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -619,6 +619,10 @@ eq(td(hours=-.2/us_per_hour), td(0)) eq(td(days=-.4/us_per_day, hours=-.2/us_per_hour), td(microseconds=-1)) + # Test for a patch in Issue 8860 + eq(td(microseconds=0.5), 0.5*td(microseconds=1.0)) + eq(td(microseconds=0.5)//td.resolution, 0.5*td.resolution//td.resolution) + def test_massive_normalization(self): td = timedelta(microseconds=-1) self.assertEqual((td.days, td.seconds, td.microseconds), diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,6 +13,7 @@ Library ------- +- Issue 8860: Fixed rounding in timedelta constructor. What's New in Python 3.4.0 Alpha 1? =================================== diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -140,19 +140,6 @@ return quo; } -/* Round a double to the nearest long. |x| must be small enough to fit - * in a C long; this is not checked. - */ -static long -round_to_long(double x) -{ - if (x >= 0.0) - x = floor(x + 0.5); - else - x = ceil(x - 0.5); - return (long)x; -} - /* Nearest integer to m / n for integers m and n. Half-integer results * are rounded to even. */ @@ -1397,7 +1384,7 @@ */ /* Conversion factors. */ -static PyObject *us_per_us = NULL; /* 1 */ +static PyObject *one = NULL; /* 1 */ static PyObject *us_per_ms = NULL; /* 1000 */ static PyObject *us_per_second = NULL; /* 1000000 */ static PyObject *us_per_minute = NULL; /* 1e6 * 60 as Python int */ @@ -2119,7 +2106,7 @@ goto Done if (us) { - y = accum("microseconds", x, us, us_per_us, &leftover_us); + y = accum("microseconds", x, us, one, &leftover_us); CLEANUP; } if (ms) { @@ -2148,7 +2135,33 @@ } if (leftover_us) { /* Round to nearest whole # of us, and add into x. */ - PyObject *temp = PyLong_FromLong(round_to_long(leftover_us)); + double whole_us = round(leftover_us); + int x_is_odd; + PyObject *temp; + + whole_us = round(leftover_us); + if (fabs(whole_us - leftover_us) == 0.5) { + /* We're exactly halfway between two integers. 
In order + * to do round-half-to-even, we must determine whether x + * is odd. Note that x is odd when it's last bit is 1. The + * code below uses bitwise and operation to check the last + * bit. */ + temp = PyNumber_And(x, one); /* temp <- x & 1 */ + if (temp == NULL) { + Py_DECREF(x); + goto Done; + } + x_is_odd = PyObject_IsTrue(temp); + Py_DECREF(temp); + if (x_is_odd == -1) { + Py_DECREF(x); + goto Done; + } + whole_us = 2.0 * round((leftover_us + x_is_odd) * 0.5) - x_is_odd; + } + + temp = PyLong_FromLong(whole_us); + if (temp == NULL) { Py_DECREF(x); goto Done; @@ -5351,12 +5364,12 @@ assert(DI100Y == 25 * DI4Y - 1); assert(DI100Y == days_before_year(100+1)); - us_per_us = PyLong_FromLong(1); + one = PyLong_FromLong(1); us_per_ms = PyLong_FromLong(1000); us_per_second = PyLong_FromLong(1000000); us_per_minute = PyLong_FromLong(60000000); seconds_per_day = PyLong_FromLong(24 * 3600); - if (us_per_us == NULL || us_per_ms == NULL || us_per_second == NULL || + if (one == NULL || us_per_ms == NULL || us_per_second == NULL || us_per_minute == NULL || seconds_per_day == NULL) return NULL; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 20:52:56 2013 From: python-checkins at python.org (alexander.belopolsky) Date: Sun, 4 Aug 2013 20:52:56 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?b?KTogTWVyZ2Uu?= Message-ID: <3c7WT44hyVz7Ljd@mail.python.org> http://hg.python.org/cpython/rev/fb7c6803ba70 changeset: 85025:fb7c6803ba70 parent: 85024:f7c84ef35b00 parent: 85023:f1cb19264a1f user: Alexander Belopolsky date: Sun Aug 04 14:52:42 2013 -0400 summary: Merge. files: Objects/unicodeobject.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -3235,7 +3235,7 @@ wchar_t *wstr; PyObject *bytes = NULL; char *errmsg; - PyObject *reason; + PyObject *reason = NULL; PyObject *exc; size_t error_pos; int surrogateescape; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:04:25 2013 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 4 Aug 2013 21:04:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Silence_compil?= =?utf-8?q?er_warning_for_an_unused_declaration?= Message-ID: <3c7WkK0N03z7LjM@mail.python.org> http://hg.python.org/cpython/rev/531df2108bf5 changeset: 85026:531df2108bf5 branch: 3.3 parent: 85022:ed8d94e4862a user: Raymond Hettinger date: Sun Aug 04 12:00:36 2013 -0700 summary: Silence compiler warning for an unused declaration files: Objects/setobject.c | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -214,7 +214,6 @@ set_insert_key(register PySetObject *so, PyObject *key, Py_hash_t hash) { register setentry *entry; - typedef setentry *(*lookupfunc)(PySetObject *, PyObject *, Py_hash_t); assert(so->lookup != NULL); entry = so->lookup(so, key, hash); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:04:26 2013 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 4 Aug 2013 21:04:26 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_merge?= Message-ID: <3c7WkL2DLjz7Ljd@mail.python.org> http://hg.python.org/cpython/rev/46df6526438f changeset: 85027:46df6526438f 
parent: 85025:fb7c6803ba70 parent: 85026:531df2108bf5 user: Raymond Hettinger date: Sun Aug 04 12:04:11 2013 -0700 summary: merge files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:19:59 2013 From: python-checkins at python.org (alexander.belopolsky) Date: Sun, 4 Aug 2013 21:19:59 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Reuse_us=5Fper=5Fsecond_in?= =?utf-8?q?_delta=5Ftotal=5Fseconds=2E?= Message-ID: <3c7X4H2LC5z7LjM@mail.python.org> http://hg.python.org/cpython/rev/604e3199acc0 changeset: 85028:604e3199acc0 parent: 85025:fb7c6803ba70 user: Alexander Belopolsky date: Sun Aug 04 15:18:58 2013 -0400 summary: Reuse us_per_second in delta_total_seconds. files: Modules/_datetimemodule.c | 10 +--------- 1 files changed, 1 insertions(+), 9 deletions(-) diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -2252,22 +2252,14 @@ { PyObject *total_seconds; PyObject *total_microseconds; - PyObject *one_million; total_microseconds = delta_to_microseconds((PyDateTime_Delta *)self); if (total_microseconds == NULL) return NULL; - one_million = PyLong_FromLong(1000000L); - if (one_million == NULL) { - Py_DECREF(total_microseconds); - return NULL; - } - - total_seconds = PyNumber_TrueDivide(total_microseconds, one_million); + total_seconds = PyNumber_TrueDivide(total_microseconds, us_per_second); Py_DECREF(total_microseconds); - Py_DECREF(one_million); return total_seconds; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:20:00 2013 From: python-checkins at python.org (alexander.belopolsky) Date: Sun, 4 Aug 2013 21:20:00 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?b?KTogTWVyZ2Uu?= Message-ID: <3c7X4J44Bmz7Ljd@mail.python.org> http://hg.python.org/cpython/rev/168768bf428d changeset: 85029:168768bf428d parent: 85028:604e3199acc0 parent: 85027:46df6526438f user: Alexander Belopolsky date: Sun Aug 04 15:19:49 2013 -0400 summary: Merge. files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:40:22 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 4 Aug 2013 21:40:22 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4MTUx?= =?utf-8?q?=3A_Replace_remaining_Idle_=27open=2E=2E=2Eclose=27_pairs_with_?= =?utf-8?b?J3dpdGggb3Blbicu?= Message-ID: <3c7XWp4jfPz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/e450e85e2075 changeset: 85030:e450e85e2075 branch: 3.3 parent: 85026:531df2108bf5 user: Terry Jan Reedy date: Sun Aug 04 15:39:03 2013 -0400 summary: Issue #18151: Replace remaining Idle 'open...close' pairs with 'with open'. 
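As a quick sketch of the pattern being applied here (the file name below is a placeholder, not one of the Idle paths; the encoding arguments are the ones visible in the diff that follows), the context-manager form closes the file on normal exit and when an exception escapes the block alike, which is what the removed open...close and try/finally pairs did by hand:

    # before: manual close, guarded by try/finally
    f = open("recent-files.lst", "r", encoding="utf_8", errors="replace")
    try:
        lines = f.readlines()
    finally:
        f.close()

    # after: the with statement closes the file automatically
    with open("recent-files.lst", "r", encoding="utf_8", errors="replace") as f:
        lines = f.readlines()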
files: Lib/idlelib/EditorWindow.py | 7 ++----- Lib/idlelib/IOBinding.py | 15 ++++++--------- Lib/idlelib/ScriptBinding.py | 5 ++--- 3 files changed, 10 insertions(+), 17 deletions(-) diff --git a/Lib/idlelib/EditorWindow.py b/Lib/idlelib/EditorWindow.py --- a/Lib/idlelib/EditorWindow.py +++ b/Lib/idlelib/EditorWindow.py @@ -882,12 +882,9 @@ "Load and update the recent files list and menus" rf_list = [] if os.path.exists(self.recent_files_path): - rf_list_file = open(self.recent_files_path,'r', - encoding='utf_8', errors='replace') - try: + with open(self.recent_files_path, 'r', + encoding='utf_8', errors='replace') as rf_list_file: rf_list = rf_list_file.readlines() - finally: - rf_list_file.close() if new_file: new_file = os.path.abspath(new_file) + '\n' if new_file in rf_list: diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py --- a/Lib/idlelib/IOBinding.py +++ b/Lib/idlelib/IOBinding.py @@ -208,11 +208,10 @@ try: # open the file in binary mode so that we can handle # end-of-line convention ourselves. - f = open(filename,'rb') - two_lines = f.readline() + f.readline() - f.seek(0) - bytes = f.read() - f.close() + with open(filename, 'rb') as f: + two_lines = f.readline() + f.readline() + f.seek(0) + bytes = f.read() except OSError as msg: tkMessageBox.showerror("I/O Error", str(msg), master=self.text) return False @@ -373,10 +372,8 @@ text = text.replace("\n", self.eol_convention) chars = self.encode(text) try: - f = open(filename, "wb") - f.write(chars) - f.flush() - f.close() + with open(filename, "wb") as f: + f.write(chars) return True except OSError as msg: tkMessageBox.showerror("I/O Error", str(msg), diff --git a/Lib/idlelib/ScriptBinding.py b/Lib/idlelib/ScriptBinding.py --- a/Lib/idlelib/ScriptBinding.py +++ b/Lib/idlelib/ScriptBinding.py @@ -87,9 +87,8 @@ self.shell = shell = self.flist.open_shell() saved_stream = shell.get_warning_stream() shell.set_warning_stream(shell.stderr) - f = open(filename, 'rb') - source = f.read() - f.close() + with open(filename, 'rb') as f: + source = f.read() if b'\r' in source: source = source.replace(b'\r\n', b'\n') source = source.replace(b'\r', b'\n') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:40:23 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 4 Aug 2013 21:40:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3c7XWq6bLMz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/af4105810309 changeset: 85031:af4105810309 parent: 85029:168768bf428d parent: 85030:e450e85e2075 user: Terry Jan Reedy date: Sun Aug 04 15:39:32 2013 -0400 summary: Merge with 3.3 files: Lib/idlelib/EditorWindow.py | 7 ++----- Lib/idlelib/IOBinding.py | 15 ++++++--------- Lib/idlelib/ScriptBinding.py | 5 ++--- 3 files changed, 10 insertions(+), 17 deletions(-) diff --git a/Lib/idlelib/EditorWindow.py b/Lib/idlelib/EditorWindow.py --- a/Lib/idlelib/EditorWindow.py +++ b/Lib/idlelib/EditorWindow.py @@ -882,12 +882,9 @@ "Load and update the recent files list and menus" rf_list = [] if os.path.exists(self.recent_files_path): - rf_list_file = open(self.recent_files_path,'r', - encoding='utf_8', errors='replace') - try: + with open(self.recent_files_path, 'r', + encoding='utf_8', errors='replace') as rf_list_file: rf_list = rf_list_file.readlines() - finally: - rf_list_file.close() if new_file: new_file = os.path.abspath(new_file) + '\n' if new_file in rf_list: diff --git a/Lib/idlelib/IOBinding.py 
b/Lib/idlelib/IOBinding.py --- a/Lib/idlelib/IOBinding.py +++ b/Lib/idlelib/IOBinding.py @@ -208,11 +208,10 @@ try: # open the file in binary mode so that we can handle # end-of-line convention ourselves. - f = open(filename,'rb') - two_lines = f.readline() + f.readline() - f.seek(0) - bytes = f.read() - f.close() + with open(filename, 'rb') as f: + two_lines = f.readline() + f.readline() + f.seek(0) + bytes = f.read() except OSError as msg: tkMessageBox.showerror("I/O Error", str(msg), master=self.text) return False @@ -373,10 +372,8 @@ text = text.replace("\n", self.eol_convention) chars = self.encode(text) try: - f = open(filename, "wb") - f.write(chars) - f.flush() - f.close() + with open(filename, "wb") as f: + f.write(chars) return True except OSError as msg: tkMessageBox.showerror("I/O Error", str(msg), diff --git a/Lib/idlelib/ScriptBinding.py b/Lib/idlelib/ScriptBinding.py --- a/Lib/idlelib/ScriptBinding.py +++ b/Lib/idlelib/ScriptBinding.py @@ -87,9 +87,8 @@ self.shell = shell = self.flist.open_shell() saved_stream = shell.get_warning_stream() shell.set_warning_stream(shell.stderr) - f = open(filename, 'rb') - source = f.read() - f.close() + with open(filename, 'rb') as f: + source = f.read() if b'\r' in source: source = source.replace(b'\r\n', b'\n') source = source.replace(b'\r', b'\n') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:40:25 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 4 Aug 2013 21:40:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4MTUx?= =?utf-8?q?=3A_Replace_remaining_Idle_=27open=2E=2E=2Eclose=27_pairs_with_?= =?utf-8?b?J3dpdGggb3Blbicu?= Message-ID: <3c7XWs1YQtz7Ljk@mail.python.org> http://hg.python.org/cpython/rev/7f6661a90d02 changeset: 85032:7f6661a90d02 branch: 2.7 parent: 85018:b3efc140d8a6 user: Terry Jan Reedy date: Sun Aug 04 15:39:56 2013 -0400 summary: Issue #18151: Replace remaining Idle 'open...close' pairs with 'with open'. files: Lib/idlelib/EditorWindow.py | 5 +---- Lib/idlelib/IOBinding.py | 11 ++++------- Lib/idlelib/ScriptBinding.py | 5 ++--- 3 files changed, 7 insertions(+), 14 deletions(-) diff --git a/Lib/idlelib/EditorWindow.py b/Lib/idlelib/EditorWindow.py --- a/Lib/idlelib/EditorWindow.py +++ b/Lib/idlelib/EditorWindow.py @@ -894,11 +894,8 @@ "Load and update the recent files list and menus" rf_list = [] if os.path.exists(self.recent_files_path): - rf_list_file = open(self.recent_files_path,'r') - try: + with open(self.recent_files_path, 'r') as rf_list_file: rf_list = rf_list_file.readlines() - finally: - rf_list_file.close() if new_file: new_file = os.path.abspath(new_file) + '\n' if new_file in rf_list: diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py --- a/Lib/idlelib/IOBinding.py +++ b/Lib/idlelib/IOBinding.py @@ -248,9 +248,8 @@ try: # open the file in binary mode so that we can handle # end-of-line convention ourselves. 
- f = open(filename,'rb') - chars = f.read() - f.close() + with open(filename, 'rb') as f: + chars = f.read() except IOError as msg: tkMessageBox.showerror("I/O Error", str(msg), master=self.text) return False @@ -383,10 +382,8 @@ if self.eol_convention != "\n": chars = chars.replace("\n", self.eol_convention) try: - f = open(filename, "wb") - f.write(chars) - f.flush() - f.close() + with open(filename, "wb") as f: + f.write(chars) return True except IOError as msg: tkMessageBox.showerror("I/O Error", str(msg), diff --git a/Lib/idlelib/ScriptBinding.py b/Lib/idlelib/ScriptBinding.py --- a/Lib/idlelib/ScriptBinding.py +++ b/Lib/idlelib/ScriptBinding.py @@ -87,9 +87,8 @@ self.shell = shell = self.flist.open_shell() saved_stream = shell.get_warning_stream() shell.set_warning_stream(shell.stderr) - f = open(filename, 'r') - source = f.read() - f.close() + with open(filename, 'r') as f: + source = f.read() if '\r' in source: source = re.sub(r"\r\n", "\n", source) source = re.sub(r"\r", "\n", source) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:44:41 2013 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 4 Aug 2013 21:44:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Silence_compil?= =?utf-8?q?er_warnings_for_strict_function_prototype_declarations=2E?= Message-ID: <3c7Xcn2QQlz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/24279f8b779d changeset: 85033:24279f8b779d branch: 3.3 parent: 85030:e450e85e2075 user: Raymond Hettinger date: Sun Aug 04 12:43:37 2013 -0700 summary: Silence compiler warnings for strict function prototype declarations. files: Modules/_ctypes/libffi_osx/x86/x86-ffi64.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c b/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c --- a/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c +++ b/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c @@ -46,7 +46,7 @@ unsigned long bytes, unsigned flags, void* raddr, - void (*fnaddr)(), + void (*fnaddr)(void), unsigned ssecount); /* All reference to register classes here is identical to the code in @@ -429,7 +429,7 @@ void ffi_call( ffi_cif* cif, - void (*fn)(), + void (*fn)(void), void* rvalue, void** avalue) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:44:42 2013 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 4 Aug 2013 21:44:42 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_merge?= Message-ID: <3c7Xcp4LY2z7Ljh@mail.python.org> http://hg.python.org/cpython/rev/c4d770c93c66 changeset: 85034:c4d770c93c66 parent: 85031:af4105810309 parent: 85033:24279f8b779d user: Raymond Hettinger date: Sun Aug 04 12:43:59 2013 -0700 summary: merge files: Modules/_ctypes/libffi_osx/x86/x86-ffi64.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c b/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c --- a/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c +++ b/Modules/_ctypes/libffi_osx/x86/x86-ffi64.c @@ -46,7 +46,7 @@ unsigned long bytes, unsigned flags, void* raddr, - void (*fnaddr)(), + void (*fnaddr)(void), unsigned ssecount); /* All reference to register classes here is identical to the code in @@ -431,7 +431,7 @@ void ffi_call( ffi_cif* cif, - void (*fn)(), + void (*fn)(void), void* rvalue, void** avalue) { -- Repository URL: http://hg.python.org/cpython From python-checkins at 
python.org Sun Aug 4 21:50:21 2013 From: python-checkins at python.org (r.david.murray) Date: Sun, 4 Aug 2013 21:50:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NjU3OiByZW1v?= =?utf-8?q?ve_duplicate_entries_from_Misc/ACKS=2E?= Message-ID: <3c7XlK65YZzS7H@mail.python.org> http://hg.python.org/cpython/rev/2ec323dadca2 changeset: 85035:2ec323dadca2 branch: 3.3 parent: 85033:24279f8b779d user: R David Murray date: Sun Aug 04 15:48:29 2013 -0400 summary: #18657: remove duplicate entries from Misc/ACKS. Patch by Madison May. files: Misc/ACKS | 3 --- 1 files changed, 0 insertions(+), 3 deletions(-) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -279,7 +279,6 @@ Kushal Das Jonathan Dasteel Pierre-Yves David -Xavier de Gaye John DeGood Ned Deily Vincent Delft @@ -341,7 +340,6 @@ Phil Elson David Ely Jeff Epler -Jeff McNeil Tom Epperly Stoffel Erasmus J?rgen A. Erhard @@ -617,7 +615,6 @@ Peter van Kampen Rafe Kaplan Jacob Kaplan-Moss -Jan Kaliszewski Janne Karila Per ?yvind Karlsen Anton Kasyanov -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 4 21:50:23 2013 From: python-checkins at python.org (r.david.murray) Date: Sun, 4 Aug 2013 21:50:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge=3A_=2318657=3A_remove_duplicate_entries_from_Misc/?= =?utf-8?q?ACKS=2E?= Message-ID: <3c7XlM0x6fz7Ljx@mail.python.org> http://hg.python.org/cpython/rev/f4f81ebc3de9 changeset: 85036:f4f81ebc3de9 parent: 85034:c4d770c93c66 parent: 85035:2ec323dadca2 user: R David Murray date: Sun Aug 04 15:50:08 2013 -0400 summary: Merge: #18657: remove duplicate entries from Misc/ACKS. files: Misc/ACKS | 3 --- 1 files changed, 0 insertions(+), 3 deletions(-) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -288,7 +288,6 @@ Kushal Das Jonathan Dasteel Pierre-Yves David -Xavier de Gaye John DeGood Ned Deily Vincent Delft @@ -351,7 +350,6 @@ Phil Elson David Ely Jeff Epler -Jeff McNeil Tom Epperly G?kcen Eraslan Stoffel Erasmus @@ -632,7 +630,6 @@ Peter van Kampen Rafe Kaplan Jacob Kaplan-Moss -Jan Kaliszewski Janne Karila Per ?yvind Karlsen Anton Kasyanov -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Mon Aug 5 05:46:56 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 05 Aug 2013 05:46:56 +0200 Subject: [Python-checkins] Daily reference leaks (f4f81ebc3de9): sum=0 Message-ID: results for f4f81ebc3de9 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogzklFSK', '-x'] From python-checkins at python.org Mon Aug 5 07:35:45 2013 From: python-checkins at python.org (raymond.hettinger) Date: Mon, 5 Aug 2013 07:35:45 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Silence_compil?= =?utf-8?q?er_warning_for_unused_declaration=2E?= Message-ID: <3c7nkn4NWlzSsw@mail.python.org> http://hg.python.org/cpython/rev/3571a921a3d6 changeset: 85037:3571a921a3d6 branch: 2.7 parent: 85032:7f6661a90d02 user: Raymond Hettinger date: Sun Aug 04 22:35:37 2013 -0700 summary: Silence compiler warning for unused declaration. 
files: Objects/setobject.c | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -212,7 +212,6 @@ set_insert_key(register PySetObject *so, PyObject *key, long hash) { register setentry *entry; - typedef setentry *(*lookupfunc)(PySetObject *, PyObject *, long); assert(so->lookup != NULL); entry = so->lookup(so, key, hash); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 08:54:55 2013 From: python-checkins at python.org (jason.coombs) Date: Mon, 5 Aug 2013 08:54:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_18532=3A_Added_tests?= =?utf-8?q?_and_documentation_to_formally_specify_the_=2Ename?= Message-ID: <3c7qV71nBDz7Llf@mail.python.org> http://hg.python.org/cpython/rev/238c37e4c395 changeset: 85038:238c37e4c395 parent: 85036:f4f81ebc3de9 user: Jason R. Coombs date: Sat Aug 03 11:39:39 2013 +0200 summary: Issue 18532: Added tests and documentation to formally specify the .name attribute on hashlib objects. files: Doc/library/hashlib.rst | 12 ++++++++++++ Lib/test/test_hashlib.py | 5 +++++ 2 files changed, 17 insertions(+), 0 deletions(-) diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -124,6 +124,18 @@ The internal block size of the hash algorithm in bytes. +A hash object has the following attributes: + +.. attribute:: hash.name + + The canonical name of this hash, always lowercase and always suitable as a + parameter to :func:`new` to create another hash of this type. + + .. versionchanged:: 3.4 + The name attribute has been present in CPython since its inception, but + until Python 3.4 was not formally specified, so may not exist on some + platforms. + A hash object has the following methods: diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -154,6 +154,11 @@ assert isinstance(h.digest(), bytes), name self.assertEqual(hexstr(h.digest()), h.hexdigest()) + def test_name_attribute(self): + for cons in self.hash_constructors: + h = cons() + assert isinstance(h.name, str), "No name attribute" + assert h.name in self.supported_hash_names def test_large_update(self): aas = b'a' * 128 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 11:30:42 2013 From: python-checkins at python.org (mark.dickinson) Date: Mon, 5 Aug 2013 11:30:42 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318659=3A_fix_test?= =?utf-8?q?=5Fformat_test_that_wasn=27t_being_executed=2E__Thanks_Vajrasky?= Message-ID: <3c7txt3KqRz7LjX@mail.python.org> http://hg.python.org/cpython/rev/cfd875bcbe41 changeset: 85039:cfd875bcbe41 user: Mark Dickinson date: Mon Aug 05 10:30:14 2013 +0100 summary: Issue #18659: fix test_format test that wasn't being executed. Thanks Vajrasky Kok for the patch. 
files: Lib/test/test_format.py | 10 +++------- 1 files changed, 3 insertions(+), 7 deletions(-) diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -327,10 +327,6 @@ self.assertIs(text % (), text) self.assertIs(text.format(), text) - -def test_main(): - support.run_unittest(FormatTest) - def test_precision(self): INT_MAX = 2147483647 @@ -342,10 +338,10 @@ self.assertEqual(str(cm.exception), "precision too big") c = complex(f) - self.assertEqual(format(f, ".0f"), "1") - self.assertEqual(format(f, ".3f"), "1.200") + self.assertEqual(format(c, ".0f"), "1+0j") + self.assertEqual(format(c, ".3f"), "1.200+0.000j") with self.assertRaises(ValueError) as cm: - format(f, ".%sf" % (INT_MAX + 1)) + format(c, ".%sf" % (INT_MAX + 1)) self.assertEqual(str(cm.exception), "precision too big") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 12:00:09 2013 From: python-checkins at python.org (mark.dickinson) Date: Mon, 5 Aug 2013 12:00:09 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318659=3A_Backed_o?= =?utf-8?q?ut_changeset_cfd875bcbe41_after_buildbot_failures=2E?= Message-ID: <3c7vbs26Stz7LlX@mail.python.org> http://hg.python.org/cpython/rev/9bee1fd64ee6 changeset: 85040:9bee1fd64ee6 user: Mark Dickinson date: Mon Aug 05 10:59:36 2013 +0100 summary: Issue #18659: Backed out changeset cfd875bcbe41 after buildbot failures. files: Lib/test/test_format.py | 10 +++++++--- 1 files changed, 7 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -327,6 +327,10 @@ self.assertIs(text % (), text) self.assertIs(text.format(), text) + +def test_main(): + support.run_unittest(FormatTest) + def test_precision(self): INT_MAX = 2147483647 @@ -338,10 +342,10 @@ self.assertEqual(str(cm.exception), "precision too big") c = complex(f) - self.assertEqual(format(c, ".0f"), "1+0j") - self.assertEqual(format(c, ".3f"), "1.200+0.000j") + self.assertEqual(format(f, ".0f"), "1") + self.assertEqual(format(f, ".3f"), "1.200") with self.assertRaises(ValueError) as cm: - format(c, ".%sf" % (INT_MAX + 1)) + format(f, ".%sf" % (INT_MAX + 1)) self.assertEqual(str(cm.exception), "precision too big") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 16:39:39 2013 From: python-checkins at python.org (nick.coghlan) Date: Mon, 5 Aug 2013 16:39:39 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Add_PEP_449=3A_Removal_of_*?= =?utf-8?q?=2Epypi=2Epython=2Eorg_mirrors?= Message-ID: <3c81pM4gk4z7LkQ@mail.python.org> http://hg.python.org/peps/rev/565b8ea7504f changeset: 5033:565b8ea7504f user: Nick Coghlan date: Tue Aug 06 00:39:29 2013 +1000 summary: Add PEP 449: Removal of *.pypi.python.org mirrors files: pep-0449.txt | 108 +++++++++++++++++++++++++++++++++++++++ 1 files changed, 108 insertions(+), 0 deletions(-) diff --git a/pep-0449.txt b/pep-0449.txt new file mode 100644 --- /dev/null +++ b/pep-0449.txt @@ -0,0 +1,108 @@ +PEP: 449 +Title: Removal of Official Public PyPI Mirrors +Version: $Revision$ +Last-Modified: $Date$ +Author: Donald Stufft +BDFL-Delegate: Richard Jones +Discussions-To: distutils-sig at python.org +Status: Draft +Type: Process +Content-Type: text/x-rst +Created: 04-Aug-2013 +Post-History: 04-Aug-2013 +Replaces: 381 + + +Abstract +======== + +This PEP provides a path to deprecate and ultimately remove the official +public mirroring 
infrastructure for `PyPI`_. It does not propose the removal +of mirroring support in general. + + +Rationale +========= + +The PyPI mirroring infrastructure (defined in `PEP381`_) provides a means to +mirror the content of PyPI used by the automatic installers. It also provides +a method for autodiscovery of mirrors and a consistent naming scheme. + +There are a number of problems with the official public mirrors: + +* They give control over a \*.python.org domain name to a third party, + allowing that third party to set or read cookies on the pypi.python.org and + python.org domain name. +* The use of a sub domain of pypi.python.org means that the mirror operators + will never be able to get a SSL certificate of their own, and giving them + one for a python.org domain name is unlikely to happen. +* They are often out of date, most often by several hours to a few days, but + regularly several days and even months. +* With the introduction of the CDN on PyPI the public mirroring infrastructure + is not as important as it once was as the CDN is also a globally distributed + network of servers which will function even if PyPI is down. +* Although there is provisions in place for it, there is currently no known + installer which uses the authenticity checks discussed in `PEP381`_ which + means that any download from a mirror is subject to attack by a malicious + mirror operator, but further more due to the lack of TLS it also means that + any download from a mirror is also subject to a MITM attack. +* They have only ever been implemented by one installer (pip), and its + implementation, besides being insecure, has serious issues with performance + and is slated for removal with it's next release (1.5). + +Due to the number of issues, some of them very serious, and the CDN which more +or less provides much of the same benefits this PEP proposes to first +deprecate and then remove the public mirroring infrastructure. The ability to +mirror and the method of mirroring will not be affected and the existing +public mirrors are encouraged to acquire their own domains to host their +mirrors on if they wish to continue hosting them. + + +Plan for Deprecation & Removal +============================== + +Immediately upon acceptance of this PEP documentation on PyPI will be updated +to reflect the deprecated nature of the official public mirrors and will +direct users to external resources like http://www.pypi-mirrors.org/ to +discover unofficial public mirrors if they wish to use one. + +On October 1st, 2013, roughly 2 months from the date of this PEP, the DNS names +of the public mirrors ([a-g].pypi.python.org) will be changed to point back to +PyPI which will be modified to accept requests from those domains. At this +point in time the public mirrors will be considered deprecated. + +Then, roughly 2 months after the release of the first version of pip to have +mirroring support removed (currently slated for pip 1.5) the DNS entries for +[a-g].pypi.python.org and last.pypi.python.org will be removed and PyPI will +no longer accept requests at those domains. + + +Unofficial Public or Private Mirrors +==================================== + +The mirroring protocol will continue to exist as defined in `PEP381`_ and +people are encouraged to utilize to host unofficial public and private mirrors +if they so desire. For operators of unofficial public or private mirrors the +recommended mirroring client is `Bandersnatch`_. + + +.. _PyPI: https://pypi.python.org/ +.. 
_PEP381: http://www.python.org/dev/peps/pep-0381/ +.. _Bandersnatch: https://pypi.python.org/pypi/bandersnatch + + +Copyright +========= + +This document has been placed in the public domain. + + + +.. + Local Variables: + mode: indented-text + indent-tabs-mode: nil + sentence-end-double-space: t + fill-column: 70 + coding: utf-8 + End: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Aug 5 18:58:15 2013 From: python-checkins at python.org (mark.dickinson) Date: Mon, 5 Aug 2013 18:58:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NjYx?= =?utf-8?q?=3A_typo_in_grp=2Estruct=5Fgroup_docstring=2E?= Message-ID: <3c84tH7264z7Ln3@mail.python.org> http://hg.python.org/cpython/rev/395ac61ebe1a changeset: 85041:395ac61ebe1a branch: 2.7 parent: 85037:3571a921a3d6 user: Mark Dickinson date: Mon Aug 05 17:56:17 2013 +0100 summary: Issue #18661: typo in grp.struct_group docstring. files: Modules/grpmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c --- a/Modules/grpmodule.c +++ b/Modules/grpmodule.c @@ -11,7 +11,7 @@ {"gr_name", "group name"}, {"gr_passwd", "password"}, {"gr_gid", "group id"}, - {"gr_mem", "group memebers"}, + {"gr_mem", "group members"}, {0} }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 18:58:17 2013 From: python-checkins at python.org (mark.dickinson) Date: Mon, 5 Aug 2013 18:58:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NjYx?= =?utf-8?q?=3A_typo_in_grp=2Estruct=5Fgroup_docstring=2E__Thanks_Vajrasky_?= =?utf-8?q?Kok=2E?= Message-ID: <3c84tL0nmSz7Lmg@mail.python.org> http://hg.python.org/cpython/rev/791034a0ae1e changeset: 85042:791034a0ae1e branch: 3.3 parent: 85035:2ec323dadca2 user: Mark Dickinson date: Mon Aug 05 17:57:01 2013 +0100 summary: Issue #18661: typo in grp.struct_group docstring. Thanks Vajrasky Kok. files: Modules/grpmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c --- a/Modules/grpmodule.c +++ b/Modules/grpmodule.c @@ -10,7 +10,7 @@ {"gr_name", "group name"}, {"gr_passwd", "password"}, {"gr_gid", "group id"}, - {"gr_mem", "group memebers"}, + {"gr_mem", "group members"}, {0} }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 18:58:19 2013 From: python-checkins at python.org (mark.dickinson) Date: Mon, 5 Aug 2013 18:58:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318661=3A_typo_in_grp=2Estruct=5Fgroup_docstring?= =?utf-8?q?_=28fix_merged_from_3=2E3=29=2E__Thanks?= Message-ID: <3c84tM3PNzz7Ln2@mail.python.org> http://hg.python.org/cpython/rev/f534960c2c02 changeset: 85043:f534960c2c02 parent: 85040:9bee1fd64ee6 parent: 85042:791034a0ae1e user: Mark Dickinson date: Mon Aug 05 17:57:54 2013 +0100 summary: Issue #18661: typo in grp.struct_group docstring (fix merged from 3.3). Thanks Vajrasky Kok. 
files: Modules/grpmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c --- a/Modules/grpmodule.c +++ b/Modules/grpmodule.c @@ -10,7 +10,7 @@ {"gr_name", "group name"}, {"gr_passwd", "password"}, {"gr_gid", "group id"}, - {"gr_mem", "group memebers"}, + {"gr_mem", "group members"}, {0} }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 22:14:54 2013 From: python-checkins at python.org (gregory.p.smith) Date: Mon, 5 Aug 2013 22:14:54 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_*_Fix_the_assertions_in_ha?= =?utf-8?q?shlib_to_use_unittest_assertion_methods_instead_of?= Message-ID: <3c89FB0dQtzRhx@mail.python.org> http://hg.python.org/cpython/rev/c6d4564dc86f changeset: 85044:c6d4564dc86f user: Gregory P. Smith date: Mon Aug 05 13:14:37 2013 -0700 summary: * Fix the assertions in hashlib to use unittest assertion methods instead of evil assert statements. * Add an additional assert to the new test_name_attribute test that actually confirms that a hash created using each h.name results in a new hash sharing the same name. files: Lib/test/test_hashlib.py | 15 ++++++++------- 1 files changed, 8 insertions(+), 7 deletions(-) diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -75,8 +75,8 @@ if _hashlib: # These two algorithms should always be present when this module # is compiled. If not, something was compiled wrong. - assert hasattr(_hashlib, 'openssl_md5') - assert hasattr(_hashlib, 'openssl_sha1') + self.assertTrue(hasattr(_hashlib, 'openssl_md5')) + self.assertTrue(hasattr(_hashlib, 'openssl_sha1')) for algorithm, constructors in self.constructors_to_test.items(): constructor = getattr(_hashlib, 'openssl_'+algorithm, None) if constructor: @@ -151,14 +151,15 @@ def test_hexdigest(self): for cons in self.hash_constructors: h = cons() - assert isinstance(h.digest(), bytes), name + self.assertIsInstance(h.digest(), bytes) self.assertEqual(hexstr(h.digest()), h.hexdigest()) def test_name_attribute(self): for cons in self.hash_constructors: h = cons() - assert isinstance(h.name, str), "No name attribute" - assert h.name in self.supported_hash_names + self.assertIsInstance(h.name, str) + self.assertIn(h.name, self.supported_hash_names) + self.assertEqual(h.name, hashlib.new(h.name).name) def test_large_update(self): aas = b'a' * 128 @@ -532,8 +533,8 @@ events = [] for threadnum in range(num_threads): chunk_size = len(data) // (10**threadnum) - assert chunk_size > 0 - assert chunk_size % len(smallest_data) == 0 + self.assertGreater(chunk_size, 0) + self.assertEqual(chunk_size % len(smallest_data), 0) event = threading.Event() events.append(event) threading.Thread(target=hash_in_chunks, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 23:17:43 2013 From: python-checkins at python.org (antoine.pitrou) Date: Mon, 5 Aug 2013 23:17:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=234885=3A_Add_weakr?= =?utf-8?q?ef_support_to_mmap_objects=2E__Patch_by_Valerie_Lambert=2E?= Message-ID: <3c8Bdg6B2Fz7LjM@mail.python.org> http://hg.python.org/cpython/rev/1754b7900da1 changeset: 85045:1754b7900da1 user: Antoine Pitrou date: Mon Aug 05 23:17:30 2013 +0200 summary: Issue #4885: Add weakref support to mmap objects. Patch by Valerie Lambert. 
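A small sketch of what this enables (assuming Python 3.4 with the patch applied; the mapping size is arbitrary): mmap objects can now be the target of a weak reference, so a cache or registry can track mappings without keeping them alive. On earlier versions, weakref.ref() on an mmap object raises TypeError.

    import mmap
    import weakref

    mm = mmap.mmap(-1, 4096)      # anonymous 4 KiB mapping
    ref = weakref.ref(mm)         # now permitted for mmap objects
    assert ref() is mm

    mm.close()
    del mm
    # With the last strong reference gone, the weak reference is cleared
    # (immediately on CPython; other implementations may need a GC pass).
    assert ref() is None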
files: Lib/test/test_mmap.py | 12 +++++++++++- Misc/ACKS | 1 + Misc/NEWS | 5 ++++- Modules/mmapmodule.c | 8 +++++++- 4 files changed, 23 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py --- a/Lib/test/test_mmap.py +++ b/Lib/test/test_mmap.py @@ -1,11 +1,12 @@ from test.support import (TESTFN, run_unittest, import_module, unlink, - requires, _2G, _4G) + requires, _2G, _4G, gc_collect) import unittest import os import re import itertools import socket import sys +import weakref # Skip test if we can't import mmap. mmap = import_module('mmap') @@ -692,6 +693,15 @@ "wrong exception raised in context manager") self.assertTrue(m.closed, "context manager failed") + def test_weakref(self): + # Check mmap objects are weakrefable + mm = mmap.mmap(-1, 16) + wr = weakref.ref(mm) + self.assertIs(wr(), mm) + del mm + gc_collect() + self.assertIs(wr(), None) + class LargeMmapTests(unittest.TestCase): def setUp(self): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -701,6 +701,7 @@ Cameron Laird David Lam Thomas Lamb +Valerie Lambert Jean-Baptiste "Jiba" Lamy Ronan Lamy Torsten Landschoff diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -13,7 +13,10 @@ Library ------- -- Issue 8860: Fixed rounding in timedelta constructor. +- Issue #4885: Add weakref support to mmap objects. Patch by Valerie Lambert. + +- Issue #8860: Fixed rounding in timedelta constructor. + What's New in Python 3.4.0 Alpha 1? =================================== diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -20,6 +20,7 @@ #define PY_SSIZE_T_CLEAN #include +#include "structmember.h" #ifndef MS_WINDOWS #define UNIX @@ -108,6 +109,7 @@ int fd; #endif + PyObject *weakreflist; access_mode access; } mmap_object; @@ -134,6 +136,8 @@ } #endif /* UNIX */ + if (m_obj->weakreflist != NULL) + PyObject_ClearWeakRefs((PyObject *) m_obj); Py_TYPE(m_obj)->tp_free((PyObject*)m_obj); } @@ -1032,7 +1036,7 @@ 0, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ + offsetof(mmap_object, weakreflist), /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ mmap_object_methods, /* tp_methods */ @@ -1190,6 +1194,7 @@ m_obj->data = NULL; m_obj->size = (size_t) map_size; m_obj->pos = (size_t) 0; + m_obj->weakreflist = NULL; m_obj->exports = 0; m_obj->offset = offset; if (fd == -1) { @@ -1394,6 +1399,7 @@ /* set the initial position */ m_obj->pos = (size_t) 0; + m_obj->weakreflist = NULL; m_obj->exports = 0; /* set the tag name */ if (tagname != NULL && *tagname != '\0') { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 23:28:02 2013 From: python-checkins at python.org (antoine.pitrou) Date: Mon, 5 Aug 2013 23:28:02 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2317934=3A_Add_a_cl?= =?utf-8?q?ear=28=29_method_to_frame_objects=2C_to_help_clean_up_expensive?= Message-ID: <3c8BsZ0XFlzS7H@mail.python.org> http://hg.python.org/cpython/rev/862ab99ab570 changeset: 85046:862ab99ab570 user: Antoine Pitrou date: Mon Aug 05 23:26:40 2013 +0200 summary: Issue #17934: Add a clear() method to frame objects, to help clean up expensive details (local variables) and break reference cycles. 
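A rough sketch of the intended use, modelled on the documentation and tests in this changeset (the Payload class and helper names are illustrative; requires Python 3.4 with this change): an exception stored for later keeps its traceback frames, and therefore their local variables, alive; clearing those frames releases the locals without discarding the traceback itself.

    import weakref

    class Payload:
        pass

    def inner(obj):
        local_copy = obj              # the frame holds a reference to obj
        raise ValueError("boom")

    def outer(obj):
        try:
            inner(obj)
        except ValueError as exc:
            return exc                # the stored traceback keeps both frames alive

    payload = Payload()
    ref = weakref.ref(payload)
    exc = outer(payload)
    del payload
    assert ref() is not None          # still alive via the traceback's frames

    tb = exc.__traceback__
    while tb is not None:
        tb.tb_frame.clear()           # release each frame's local variables
        tb = tb.tb_next
    assert ref() is None              # collectable (immediately so on CPython)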
files: Doc/library/inspect.rst | 4 +++ Doc/reference/datamodel.rst | 14 ++++++++++++ Include/frameobject.h | 3 ++ Include/genobject.h | 2 + Lib/test/test_sys.py | 2 +- Lib/test/test_traceback.py | 24 +++++++++++++++++--- Misc/NEWS | 3 ++ Objects/frameobject.c | 28 +++++++++++++++++++++++- Objects/genobject.c | 8 ++++-- Python/ceval.c | 2 + 10 files changed, 80 insertions(+), 10 deletions(-) diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -846,6 +846,10 @@ finally: del frame + If you want to keep the frame around (for example to print a traceback + later), you can also break reference cycles by using the + :meth:`frame.clear` method. + The optional *context* argument supported by most of these functions specifies the number of lines of context to return, which are centered around the current line. diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -934,6 +934,20 @@ frame). A debugger can implement a Jump command (aka Set Next Statement) by writing to f_lineno. + Frame objects support one method: + + .. method:: frame.clear() + + This method clears all references to local variables held by the + frame. Also, if the frame belonged to a generator, the generator + is finalized. This helps break reference cycles involving frame + objects (for example when catching an exception and storing its + traceback for later use). + + :exc:`RuntimeError` is raised if the frame is currently executing. + + .. versionadded:: 3.4 + Traceback objects .. index:: object: traceback diff --git a/Include/frameobject.h b/Include/frameobject.h --- a/Include/frameobject.h +++ b/Include/frameobject.h @@ -36,6 +36,8 @@ non-generator frames. See the save_exc_state and swap_exc_state functions in ceval.c for details of their use. */ PyObject *f_exc_type, *f_exc_value, *f_exc_traceback; + /* Borrowed referenced to a generator, or NULL */ + PyObject *f_gen; PyThreadState *f_tstate; int f_lasti; /* Last instruction if called */ @@ -46,6 +48,7 @@ bytecode index. 
*/ int f_lineno; /* Current line number */ int f_iblock; /* index in f_blockstack */ + char f_executing; /* whether the frame is still executing */ PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */ PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */ } PyFrameObject; diff --git a/Include/genobject.h b/Include/genobject.h --- a/Include/genobject.h +++ b/Include/genobject.h @@ -36,6 +36,8 @@ PyAPI_FUNC(int) PyGen_NeedsFinalizing(PyGenObject *); PyAPI_FUNC(int) _PyGen_FetchStopIterationValue(PyObject **); PyObject *_PyGen_Send(PyGenObject *, PyObject *); +PyAPI_FUNC(void) _PyGen_Finalize(PyObject *self); + #ifdef __cplusplus } diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -764,7 +764,7 @@ nfrees = len(x.f_code.co_freevars) extras = x.f_code.co_stacksize + x.f_code.co_nlocals +\ ncells + nfrees - 1 - check(x, vsize('12P3i' + CO_MAXBLOCKS*'3i' + 'P' + extras*'P')) + check(x, vsize('13P3ic' + CO_MAXBLOCKS*'3i' + 'P' + extras*'P')) # function def func(): pass check(func, size('12P')) diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -150,11 +150,17 @@ class TracebackFormatTests(unittest.TestCase): - def test_traceback_format(self): + def some_exception(self): + raise KeyError('blah') + + def check_traceback_format(self, cleanup_func=None): try: - raise KeyError('blah') + self.some_exception() except KeyError: type_, value, tb = sys.exc_info() + if cleanup_func is not None: + # Clear the inner frames, not this one + cleanup_func(tb.tb_next) traceback_fmt = 'Traceback (most recent call last):\n' + \ ''.join(traceback.format_tb(tb)) file_ = StringIO() @@ -183,12 +189,22 @@ # Make sure that the traceback is properly indented. tb_lines = python_fmt.splitlines() - self.assertEqual(len(tb_lines), 3) - banner, location, source_line = tb_lines + self.assertEqual(len(tb_lines), 5) + banner = tb_lines[0] + location, source_line = tb_lines[-2:] self.assertTrue(banner.startswith('Traceback')) self.assertTrue(location.startswith(' File')) self.assertTrue(source_line.startswith(' raise')) + def test_traceback_format(self): + self.check_traceback_format() + + def test_traceback_format_with_cleared_frames(self): + # Check that traceback formatting also works with a clear()ed frame + def cleanup_tb(tb): + tb.tb_frame.clear() + self.check_traceback_format(cleanup_tb) + def test_stack_format(self): # Verify _stack functions. Note we have to use _getframe(1) to # compare them without this frame appearing in the output diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #17934: Add a clear() method to frame objects, to help clean up + expensive details (local variables) and break reference cycles. 
+ Library ------- diff --git a/Objects/frameobject.c b/Objects/frameobject.c --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -488,7 +488,7 @@ } static void -frame_clear(PyFrameObject *f) +frame_tp_clear(PyFrameObject *f) { PyObject **fastlocals, **p, **oldtop; Py_ssize_t i, slots; @@ -500,6 +500,7 @@ */ oldtop = f->f_stacktop; f->f_stacktop = NULL; + f->f_executing = 0; Py_CLEAR(f->f_exc_type); Py_CLEAR(f->f_exc_value); @@ -520,6 +521,25 @@ } static PyObject * +frame_clear(PyFrameObject *f) +{ + if (f->f_executing) { + PyErr_SetString(PyExc_RuntimeError, + "cannot clear an executing frame"); + return NULL; + } + if (f->f_gen) { + _PyGen_Finalize(f->f_gen); + assert(f->f_gen == NULL); + } + frame_tp_clear(f); + Py_RETURN_NONE; +} + +PyDoc_STRVAR(clear__doc__, +"F.clear(): clear most references held by the frame"); + +static PyObject * frame_sizeof(PyFrameObject *f) { Py_ssize_t res, extras, ncells, nfrees; @@ -538,6 +558,8 @@ "F.__sizeof__() -> size of F in memory, in bytes"); static PyMethodDef frame_methods[] = { + {"clear", (PyCFunction)frame_clear, METH_NOARGS, + clear__doc__}, {"__sizeof__", (PyCFunction)frame_sizeof, METH_NOARGS, sizeof__doc__}, {NULL, NULL} /* sentinel */ @@ -566,7 +588,7 @@ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */ 0, /* tp_doc */ (traverseproc)frame_traverse, /* tp_traverse */ - (inquiry)frame_clear, /* tp_clear */ + (inquiry)frame_tp_clear, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ @@ -708,6 +730,8 @@ f->f_lasti = -1; f->f_lineno = code->co_firstlineno; f->f_iblock = 0; + f->f_executing = 0; + f->f_gen = NULL; _PyObject_GC_TRACK(f); return f; diff --git a/Objects/genobject.c b/Objects/genobject.c --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -15,8 +15,8 @@ return 0; } -static void -gen_finalize(PyObject *self) +void +_PyGen_Finalize(PyObject *self) { PyGenObject *gen = (PyGenObject *)self; PyObject *res; @@ -140,6 +140,7 @@ Py_XDECREF(t); Py_XDECREF(v); Py_XDECREF(tb); + gen->gi_frame->f_gen = NULL; gen->gi_frame = NULL; Py_DECREF(f); } @@ -505,7 +506,7 @@ 0, /* tp_weaklist */ 0, /* tp_del */ 0, /* tp_version_tag */ - gen_finalize, /* tp_finalize */ + _PyGen_Finalize, /* tp_finalize */ }; PyObject * @@ -517,6 +518,7 @@ return NULL; } gen->gi_frame = f; + f->f_gen = (PyObject *) gen; Py_INCREF(f->f_code); gen->gi_code = (PyObject *)(f->f_code); gen->gi_running = 0; diff --git a/Python/ceval.c b/Python/ceval.c --- a/Python/ceval.c +++ b/Python/ceval.c @@ -1182,6 +1182,7 @@ stack_pointer = f->f_stacktop; assert(stack_pointer != NULL); f->f_stacktop = NULL; /* remains NULL unless yield suspends frame */ + f->f_executing = 1; if (co->co_flags & CO_GENERATOR && !throwflag) { if (f->f_exc_type != NULL && f->f_exc_type != Py_None) { @@ -3206,6 +3207,7 @@ /* pop frame */ exit_eval_frame: Py_LeaveRecursiveCall(); + f->f_executing = 0; tstate->frame = f->f_back; return retval; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 5 23:35:50 2013 From: python-checkins at python.org (antoine.pitrou) Date: Mon, 5 Aug 2013 23:35:50 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Followup_to_862ab99ab570?= =?utf-8?q?=3A_I_forgot_to_add_the_magnificent_test=5Fframe=2Epy=2E?= Message-ID: <3c8C2Z39HJzS7H@mail.python.org> http://hg.python.org/cpython/rev/438cdc97d8ee changeset: 85047:438cdc97d8ee user: Antoine Pitrou date: Mon Aug 05 23:35:43 2013 +0200 summary: Followup to 862ab99ab570: I forgot to add the magnificent test_frame.py. 
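Two behaviours documented above are worth a compact illustration ahead of the new test file (a sketch, assuming Python 3.4 with this change; sys._getframe() is used only to get hold of a running frame): clear() refuses to operate on a frame that is still executing, and clearing the frame of a suspended generator finalizes that generator.

    import sys

    def running():
        frame = sys._getframe()       # this function's own, still-executing frame
        try:
            frame.clear()
        except RuntimeError:
            return "refused"

    assert running() == "refused"

    def gen():
        try:
            yield
        finally:
            print("generator finalized")

    g = gen()
    next(g)                           # suspend the generator at the yield
    g.gi_frame.clear()                # runs the finally block: generator is finalized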
files: Lib/test/test_frame.py | 113 +++++++++++++++++++++++++++++ 1 files changed, 113 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py new file mode 100644 --- /dev/null +++ b/Lib/test/test_frame.py @@ -0,0 +1,113 @@ +import gc +import sys +import unittest +import weakref + +from test import support + + +class ClearTest(unittest.TestCase): + """ + Tests for frame.clear(). + """ + + def inner(self, x=5, **kwargs): + 1/0 + + def outer(self, **kwargs): + try: + self.inner(**kwargs) + except ZeroDivisionError as e: + exc = e + return exc + + def clear_traceback_frames(self, tb): + """ + Clear all frames in a traceback. + """ + while tb is not None: + tb.tb_frame.clear() + tb = tb.tb_next + + def test_clear_locals(self): + class C: + pass + c = C() + wr = weakref.ref(c) + exc = self.outer(c=c) + del c + support.gc_collect() + # A reference to c is held through the frames + self.assertIsNot(None, wr()) + self.clear_traceback_frames(exc.__traceback__) + support.gc_collect() + # The reference was released by .clear() + self.assertIs(None, wr()) + + def test_clear_generator(self): + endly = False + def g(): + nonlocal endly + try: + yield + inner() + finally: + endly = True + gen = g() + next(gen) + self.assertFalse(endly) + # Clearing the frame closes the generator + gen.gi_frame.clear() + self.assertTrue(endly) + + def test_clear_executing(self): + # Attempting to clear an executing frame is forbidden. + try: + 1/0 + except ZeroDivisionError as e: + f = e.__traceback__.tb_frame + with self.assertRaises(RuntimeError): + f.clear() + with self.assertRaises(RuntimeError): + f.f_back.clear() + + def test_clear_executing_generator(self): + # Attempting to clear an executing generator frame is forbidden. + endly = False + def g(): + nonlocal endly + try: + 1/0 + except ZeroDivisionError as e: + f = e.__traceback__.tb_frame + with self.assertRaises(RuntimeError): + f.clear() + with self.assertRaises(RuntimeError): + f.f_back.clear() + yield f + finally: + endly = True + gen = g() + f = next(gen) + self.assertFalse(endly) + + @support.cpython_only + def test_clear_refcycles(self): + # .clear() doesn't leave any refcycle behin + with support.disable_gc(): + class C: + pass + c = C() + wr = weakref.ref(c) + exc = self.outer(c=c) + del c + self.assertIsNot(None, wr()) + self.clear_traceback_frames(exc.__traceback__) + self.assertIs(None, wr()) + + +def test_main(): + support.run_unittest(__name__) + +if __name__ == "__main__": + test_main() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 01:31:57 2013 From: python-checkins at python.org (victor.stinner) Date: Tue, 6 Aug 2013 01:31:57 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Rewrite_the_PEP_446?= Message-ID: <3c8FcY2wMvz7Ljn@mail.python.org> http://hg.python.org/peps/rev/2858aee10763 changeset: 5034:2858aee10763 user: Victor Stinner date: Tue Aug 06 01:22:15 2013 +0200 summary: Rewrite the PEP 446 files: pep-0446.txt | 466 +++++++++++++++++++++++++++----------- 1 files changed, 329 insertions(+), 137 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -1,162 +1,369 @@ PEP: 446 -Title: Add new parameters to configure the inheritance of files and for non-blocking sockets +Title: Make newly created file descriptors non-inheritable Version: $Revision$ Last-Modified: $Date$ Author: Victor Stinner Status: Draft Type: Standards Track Content-Type: text/x-rst -Created: 3-July-2013 +Created: 5-August-2013 Python-Version: 
3.4 Abstract ======== -This PEP proposes new portable parameters and functions to configure the -inheritance of file descriptors and the non-blocking flag of sockets. +Leaking file descriptors in child processes causes various annoying +issues and is a known major security vulnerability. This PEP proposes to +make all file descriptors created by Python non-inheritable by default +to have a well defined and portable behaviour and reduce the risk of +these issues. This PEP fixes also a race condition +in multithreaded applications on operating systems supporting atomic +flags to create non-inheritable file descriptors. Rationale ========= -Inheritance of file descriptors +Inheritance of File Descriptors ------------------------------- -The inheritance of file descriptors in child processes can be configured -on each file descriptor using a *close-on-exec* flag. By default, the -close-on-exec flag is not set. +Each operating system handles the inheritance of file descriptors +differently. Windows creates non-inheritable file descriptors by +default, whereas UNIX creates inheritable file descriptors. Python +prefers the POSIX API over the native Windows API to have a single code +base, and so creates inheritable file descriptors. -On Windows, the close-on-exec flag is the inverse of ``HANDLE_FLAG_INHERIT``. File -descriptors are not inherited if the ``bInheritHandles`` parameter of -the ``CreateProcess()`` function is ``FALSE``, even if the -``HANDLE_FLAG_INHERIT`` flag is set. If ``bInheritHandles`` is ``TRUE``, -only file descriptors with ``HANDLE_FLAG_INHERIT`` flag set are -inherited, others are not. +There is one exception: ``os.pipe()`` creates non-inheritable pipes on +Windows, whereas it creates inheritable pipes on UNIX. The reason comes +from an implementation artifact: ``os.pipe()`` calls ``CreatePipe()`` on +Windows, whereas it calls ``pipe()`` on UNIX. The call to +``CreatePipe()`` was added in 1994, before the introduction of +``pipe()`` in the POSIX API in Windows 98. The `issue #4708 +`_ proposes to change ``os.pipe()`` on +Windows to create inheritable pipes. -On UNIX, the close-on-exec flag is ``O_CLOEXEC``. File descriptors with -the ``O_CLOEXEC`` flag set are closed at the execution of a new program -(ex: when calling ``execv()``). -The ``O_CLOEXEC`` flag has no effect on ``fork()``, all file descriptors -are inherited by the child process. Futhermore, most properties file -descriptors are shared between the parent and the child processes, -except file attributes which are duplicated (``O_CLOEXEC`` is the only -file attribute). Setting ``O_CLOEXEC`` flag of a file descriptor in the -child process does not change the ``O_CLOEXEC`` flag of the file -descriptor in the parent process. +Inheritance of File Descriptors on Windows +------------------------------------------ +On Windows, the native type of file objects are handles (C type +``HANDLE``). These handles have a ``HANDLE_FLAG_INHERIT`` flag which +defines if a handle can be inherited in a child process or not. For the +POSIX API, the C runtime (CRT) provides also file descriptors (C type +``int``). The handle of a file descriptor can be retrieved using +``_get_osfhandle(fd)``. A file descriptor can be created from a handle +using ``_open_osfhandle(handle)``. -Issues of the inheritance of file descriptors ---------------------------------------------- +Handles are only inherited if their inheritable flag +(``HANDLE_FLAG_INHERIT``) is set and if the ``bInheritHandles`` +parameter of `CreateProcess() +`_ +is ``TRUE``. 
Using ``CreateProcess()``, all file descriptors except +standard streams (0, 1, 2) are closed in the child process, even if +``bInheritHandles`` is ``TRUE``. Using the ``spawnv()`` function, all +inheritable file descriptors are inherited in the child process. This +function uses the undocumented fields *cbReserved2* and *lpReserved2* of +the `STARTUPINFO +`_ +structure to pass an array of file descriptors. -Inheritance of file descriptors causes issues. For example, closing a -file descriptor in the parent process does not release the resource -(file, socket, ...), because the file descriptor is still open in the -child process. +To replace standard streams (stdin, stdout, stderr), the +``STARTF_USESTDHANDLES`` flag must be set in the *dwFlags* field of the +``STARTUPINFO`` structure and the *bInheritHandles* parameter of +``CreateProcess()`` must be set to ``TRUE``. So when at least one +standard stream is replaced, all inheritable handles are inherited by +the child process. -Leaking file descriptors is also a major security vulnerability. An -untrusted child process can read sensitive data like passwords and take -control of the parent process though leaked file descriptors. It is for -example a known vulnerability to escape from a chroot. +See also: +* `Handle Inheritance + `_ +* `Q315939: PRB: Child Inherits Unintended Handles During + CreateProcess Call `_ -Non-blocking sockets + +Inheritance of File Descriptors on UNIX +--------------------------------------- + +POSIX provides a *close-on-exec* flag on file descriptors to close +automatically a file descriptor when the C function ``execv()`` is +called. File descriptors with the *close-on-exec* flag unset are +inherited in the child process, file descriptros with the flag set are +closed in the child process. + +The flag can be set in two syscalls (one to get current flags, a second +to set new flags) using ``fcntl()``:: + + int flags, res; + flags = fcntl(fd, F_GETFD); + if (flags == -1) { /* handle the error */ } + flags |= FD_CLOEXEC; + /* or "flags &= ~FD_CLOEXEC;" to clear the flag */ + res = fcntl(fd, F_SETFD, flags); + if (res == -1) { /* handle the error */ } + +FreeBSD, Linux, Mac OS X, NetBSD, OpenBSD and QNX support also setting +the flag in a single syscall using ioctl():: + + int res; + res = ioctl(fd, FIOCLEX, 0); + if (!res) { /* handle the error */ } + +The *close-on-exec* flag has no effect on ``fork()``: all file +descriptors are inherited by the child process. The `Python issue #16500 +"Add an atfork module" `_ proposes to +add a new ``atfork`` module to execute code at fork. It may be used to +close automatically file descriptors at fork. + + +Issues with Inheritable File Descriptors +---------------------------------------- + +Most of the time, inheritable file descriptors "leaked" in child +processes are not noticed, because they don't cause major bugs. It does +not mean that these bugs must not be fixed. + +Two example of common issues with inherited file descriptors: + +* On Windows, a directory cannot be removed until all file handles open + in the directory are closed. It may explain why a temporary directory + cannot be removed. The same issue can be seen with files, except if + the file is temporary and was created with the ``FILE_SHARE_DELETE`` + flag (``O_TEMPORARY`` mode for ``open()``). +* If a listening socket is leaked in a child process, the socket address + cannot be reused until the parent and child processes terminated. 
For + example, if a web server spawn a new program to handle a process, and + the server restarts while the program is not done: the server cannot + start because the TCP port is still in use. + +Leaking file descriptors is also a well known security vulnerability: +read +`FIO42-C. Ensure files are properly closed when they are no longer +needed +`_ +of the CERT. + +An untrusted child process can read sensitive data like passwords and +take control of the parent process though leaked file descriptors. It is +for example a known vulnerability to escape from a chroot. With a leaked +listening socket, a child process can accept new connections to read +sensitive data. + + +Atomic Creation of non-inheritable File Descriptors +--------------------------------------------------- + +In a multithreaded application, a inheritable file descriptor can be +created just before a new program is spawn, before the file descriptor +is made non-inheritable. In this case, fhe file descriptor is leaked to +the child process. This race condition could be avoided if the file +descriptor is created directly non-inheritable. + +FreeBSD, Linux, Mac OS X, Windows and many other operating systems +support creating non-inheritable file descriptors with the inheritable +flag cleared atomically at the creating of the file descriptor. + +On Windows, since at least Windows XP, the `SECURITY_ATTRIBUTES +`_ +structure can be used to clear the ``HANDLE_FLAG_INHERIT`` flag: set +*bInheritHandle* field to ``FALSE``. This structure cannot be used with +sockets: a new ``WSA_FLAG_NO_HANDLE_INHERIT`` flag was added in Windows +7 SP1 and Windows Server 2008 R2 SP1 for ``WSASocket()``. If this flag +is used on an older Windows verison (ex: Windows XP SP3), +``WSASocket()`` fails with ``WSAEPROTOTYPE``. + +On UNIX, new flags were added for files and sockets: + + * ``O_CLOEXEC``: available on Linux (2.6.23), FreeBSD (8.3), + OpenBSD 5.0, Solaris 11, QNX, BeOS, next NetBSD release (6.1?). + This flag is part of POSIX.1-2008. + * ``SOCK_CLOEXEC`` flag for ``socket()`` and ``socketpair()``, + available on Linux 2.6.27, OpenBSD 5.2, NetBSD 6.0. + * ``fcntl()``: ``F_DUPFD_CLOEXEC`` flag, available on Linux 2.6.24, + OpenBSD 5.0, FreeBSD 9.1, NetBSD 6.0, Solaris 11. This flag is part + of POSIX.1-2008. + * ``fcntl()``: ``F_DUP2FD_CLOEXEC`` flag, available on FreeBSD 9.1 + and Solaris 11. + * ``recvmsg()``: ``MSG_CMSG_CLOEXEC``, available on Linux 2.6.23, + NetBSD 6.0. + +On Linux older than 2.6.23, ``O_CLOEXEC`` flag is simply ignored. So +``fcntl()`` must be called to check if the file descriptor is +non-inheritable: ``O_CLOEXEC`` is not supported if the ``FD_CLOEXEC`` +flag is missing. On Linux older than 2.6.27, ``socket()`` or +``socketpair()`` fail with ``errno`` set to ``EINVAL`` if the +``SOCK_CLOEXEC`` flag is set in the socket type. + +New functions: + + * ``dup3()``: available on Linux 2.6.27 (and glibc 2.9) + * ``pipe2()``: available on Linux 2.6.27 (and glibc 2.9) + * ``accept4()``: available on Linux 2.6.28 (and glibc 2.10) + +On Linux older than 2.6.28, ``accept4()`` fails with ``errno`` set to +``ENOSYS``. + +Summary: + +=========================== =============== ==================================== +Operating System Atomic File Atomic Socket +=========================== =============== ==================================== +FreeBSD 8.3 (2012) X +Linux 2.6.23 (2007) 2.6.27 (2008) +Mac OS X 10.8 (2012) X +NetBSD 6.1 (?) 
6.0 (2012) +OpenBSD 5.0 (2011) 5.2 (2012) +Solaris 11 (2011) X +Windows XP (2001) Seven SP1 (2011), 2008 R2 SP1 (2011) +=========================== =============== ==================================== + +Legend: + +* "Atomic File": first version of the operating system supporting + creating atomatically a non-inheritable file descriptor using + ``open()`` +* "Atomic Socket": first version of the operating system supporting + creating atomatically a non-inheritable socket +* "X": not supported yet + + +Status in Python 3.3 -------------------- -To handle multiple network clients in a single thread, a multiplexing -function like ``select()`` can be used. For best performances, sockets -must be configured as non-blocking. Operations like ``send()`` and -``recv()`` return an ``EAGAIN`` or ``EWOULDBLOCK`` error if the -operation would block. +Python 3.3 creates inheritable file descriptors on all platforms, except +``os.pipe()`` which creates non-inheritable file descriptors on Windows. -By default, newly created sockets are blocking. Setting the non-blocking -mode requires additional system calls. +New constants and functions related to the atomic creation of +non-inheritable file descriptors were added to Python 3.3: +``os.O_CLOEXEC``, ``os.pipe2()`` and ``socket.SOCK_CLOEXEC``. -On UNIX, the blocking flag is ``O_NONBLOCK``: a pipe and a socket are -non-blocking if the ``O_NONBLOCK`` flag is set. +On UNIX, the ``subprocess`` module closes all file descriptors in the +child process, except standard streams (0, 1, 2) and file descriptors of +the *pass_fds* parameter. If the *close_fds* parameter is set to +``False``, all inheritable file descriptors are inherited in the child +process. +On Windows, the ``subprocess`` closes all handles and file descriptors +in the child process by default. If at least one standard stream (stdin, +stdout or stderr) is replaced (ex: redirected into a pipe), all +inheritable handles are inherited in the child process -Setting flags at the creation of the file descriptor ----------------------------------------------------- +All inheritable file descriptors are inherited by the child process +using the functions of the ``os.execv*()`` and ``os.spawn*()`` families. -Windows and recent versions of other operating systems like Linux -support setting the close-on-exec flag directly at the creation of file -descriptors, and close-on-exec and blocking flags at the creation of -sockets. +On UNIX, the ``multiprocessing`` module uses ``os.fork()`` and so all +file descriptors are inherited by child processes. -Setting these flags at the creation is atomic and avoids additional -system calls. +On Windows, all inheritable handles are inherited by the child process +using the ``multiprocessing`` module, all file descriptors except +standard streams are closed. 
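A minimal POSIX-only sketch of the *pass_fds* behaviour described above (the child command line and pipe usage are invented for illustration and are not part of the patch)::

    import os, subprocess, sys

    r, w = os.pipe()                      # inheritable by default on UNIX in 3.3
    child = subprocess.Popen(
        [sys.executable, "-c",
         "import os, sys; print(os.read(int(sys.argv[1]), 5))",
         str(r)],
        pass_fds=(r,))                    # keep only this extra fd open in the child
    os.close(r)                           # parent no longer needs the read end
    os.write(w, b"hello")
    os.close(w)
    child.wait()                          # child prints b'hello'

Without the ``pass_fds=(r,)`` argument the read end would be closed in the child, since ``close_fds`` defaults to ``True`` on Python 3.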
+ +Summary: + +=========================== ============= ================== ============= +Module FD on UNIX Handles on Windows FD on Windows +=========================== ============= ================== ============= +subprocess, default STD, pass_fds none STD +subprocess, close_fds=False all all STD +os.execv(), os.spawn() all all all +multiprocessing all all STD +=========================== ============= ================== ============= + +Legend: + +* "all": all *inheritable* file descriptors or handles are inherited in + the child process +* "none": all handles are closed in the child process +* "STD": only file descriptors 0 (stdin), 1 (stdout) and 2 (stderr) are + inherited in the child process +* "pass_fds": file descriptors of the *pass_fds* parameter of the + subprocess are inherited Proposal ======== -New cloexec And blocking Parameters ------------------------------------ +Non-inheritable File Descriptors +-------------------------------- -Add a new optional *cloexec* on functions creating file descriptors: +The following functions are modified to make newly created file +descriptors as non-inheritable by default: -* ``io.FileIO`` -* ``io.open()`` -* ``open()`` -* ``os.dup()`` -* ``os.dup2()`` -* ``os.fdopen()`` -* ``os.open()`` -* ``os.openpty()`` -* ``os.pipe()`` -* ``select.devpoll()`` -* ``select.epoll()`` -* ``select.kqueue()`` - -Add new optional *cloexec* and *blocking* parameters to functions -creating sockets: - -* ``asyncore.dispatcher.create_socket()`` -* ``socket.socket()`` -* ``socket.socket.accept()`` -* ``socket.socket.dup()`` -* ``socket.socket.fromfd`` -* ``socket.socketpair()`` - -The default value of *cloexec* is ``False`` and the default value of -*blocking* is ``True``. - -The atomicity is not guaranteed. If the platform does not support -setting close-on-exec and blocking flags at the creation of the file -descriptor or socket, the flags are set using additional system calls. + * ``asyncore.dispatcher.create_socket()`` + * ``io.FileIO`` + * ``io.open()`` + * ``open()`` + * ``os.dup()`` + * ``os.dup2()`` + * ``os.fdopen()`` + * ``os.open()`` + * ``os.openpty()`` + * ``os.pipe()`` + * ``select.devpoll()`` + * ``select.epoll()`` + * ``select.kqueue()`` + * ``socket.socket()`` + * ``socket.socket.accept()`` + * ``socket.socket.dup()`` + * ``socket.socket.fromfd`` + * ``socket.socketpair()`` New Functions ------------- -Add new functions the get and set the close-on-exec flag of a file -descriptor, available on all platforms: +* ``os.get_inheritable(fd: int)``: return ``True`` if the file + descriptor can be inherited by child processes, ``False`` otherwise. +* ``os.set_inheritable(fd: int, inheritable: bool)``: set the + inheritable flag of the specified file descriptor. -* ``os.get_cloexec(fd:int) -> bool`` -* ``os.set_cloexec(fd:int, cloexec: bool)`` +These new functions are available on all platforms. -Add new functions the get and set the blocking flag of a file -descriptor, only available on UNIX: - -* ``os.get_blocking(fd:int) -> bool`` -* ``os.set_blocking(fd:int, blocking: bool)`` +On Windows, these functions accept also "file descriptors" of sockets: +the result of ``sockobj.fileno()``. Other Changes ------------- -The ``subprocess.Popen`` class must clear the close-on-exec flag of file -descriptors of the ``pass_fds`` parameter. The flag is cleared in the -child process before executing the program; the change does not change -the flag in the parent process. +* On UNIX, subprocess makes file descriptors of the *pass_fds* parameter + inheritable. 
The file descriptor is made inheritable in the child + process after the ``fork()`` and before ``execv()``, the inheritable + flag of file descriptors is unchanged in the parent process. -The close-on-exec flag must also be set on private file descriptors and -sockets in the Python standard library. For example, on UNIX, -os.urandom() opens ``/dev/urandom`` to read some random bytes and the -file descriptor is closed at function exit. The file descriptor is not -expected to be inherited by child processes. +* ``os.dup2(fd, fd2)`` makes *fd2* inheritable if *fd2* is ``0`` + (stdin), ``1`` (stdout) or ``2`` (stderr) and *fd2* is different than + *fd*. + + +Backward Compatibility +====================== + +This PEP break applications relying on inheritance of file descriptors. +Developers are encouraged to reuse the high-level Python module +``subprocess`` which handle the inheritance of file descriptors in a +portable way. + +Applications using the ``subprocess`` module with the *pass_fds* +parameter or using ``os.dup2()`` to redirect standard streams should not +be affected. + +Python does no more conform to POSIX, since file descriptors are made +non-inheritable by default. Python was not designed to conform to POSIX, +Python is designed to develop portable applications. + + +Previous Work +============= + +The programming languages Go, Perl and Ruby make newly created file +descriptors non-inheritable: since Go 1.0, Perl 1.0 and Ruby 2.0. + +The SCons project overrides builtin functions ``file()`` and ``open()`` +to make files non-inheritable on Windows: +see `win32.py +`_. Rejected Alternatives @@ -169,45 +376,27 @@ is a previous attempt proposing various other alternatives, but no consensus could be reached. -This PEP has a well defined behaviour (the default value of the new -*cloexec* parameter is not configurable), is more conservative (no -backward compatibility issue), and is much simpler. +No special case for standard streams +------------------------------------ +Functions handling file descriptors should not handle standard streams +(file descriptors ``0``, ``1``, ``2``) differently. -Add blocking parameter for file descriptors and use Windows overlapped I/O --------------------------------------------------------------------------- +This option does not work on Windows. On Windows, +``os.set_inheritable(fd, inheritable)`` (calling +``SetHandleInformation()`` to set or clear ``HANDLE_FLAG_INHERIT`` flag) +on file descriptor ``0`` (stdin), ``1`` (stdout) or ``2`` (stderr) fails +with ``OSError(87, 'invalid argument')``. If ``os.dup2(fd, fd2)`` would +always make *fd2* non-inheritable, the function would raise an exception +when used to redirect standard streams. -Windows supports non-blocking operations on files using an extension of -the Windows API called "Overlapped I/O". Using this extension requires -to modify the Python standard library and applications to pass a -``OVERLAPPED`` structure and an event loop to wait for the completion of -operations. +Another option is to add a new *inheritable* parameter to ``os.dup2()``. -This PEP only tries to expose portable flags on file descriptors and -sockets. Supporting overlapped I/O requires an abstraction providing a -high-level and portable API for asynchronous operations on files and -sockets. Overlapped I/O are out of the scope of this PEP. - -UNIX supports non-blocking files, moreover recent versions of operating -systems support setting the non-blocking flag at the creation of a file -descriptor. 
It would be possible to add a new optional *blocking* -parameter to Python functions creating file descriptors. On Windows, -creating a file descriptor with ``blocking=False`` would raise a -``NotImplementedError``. This behaviour is not acceptable for the ``os`` -module which is designed as a thin wrapper on the C functions of the -operating system. If a platform does not support a function, the -function should not be available on the platform. For example, -the ``os.fork()`` function is not available on Windows. - -UNIX has more flag on file descriptors: ``O_DSYNC``, ``O_SYNC``, -``O_DIRECT``, etc. Adding all these flags complicates the signature and -the implementation of functions creating file descriptor like open(). -Moreover, these flags do not work on any file type, and are not -portable. - -For all these reasons, this alternative was rejected. The PEP 3156 -proposes an abstraction for asynchronous I/O supporting non-blocking -files on Windows. +This PEP has a special-case for ``os.dup2()`` to not break backward +compatibility on applications redirection standard streams before +calling the C function ``execv()``. Examples in the Python standard +library: ``CGIHTTPRequestHandler.run_cgi()`` and ``pty.fork()`` use +``os.dup2()`` to redict stdin, stdout and stderr. Links @@ -230,6 +419,8 @@ `_ * `#17070: Use the new cloexec to improve security and avoid bugs `_ +* `#18571: Implementation of the PEP 446: non-inheriable file + descriptors `_ Other links: @@ -246,3 +437,4 @@ This document has been placed into the public domain. + -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Aug 6 02:19:04 2013 From: python-checkins at python.org (victor.stinner) Date: Tue, 6 Aug 2013 02:19:04 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_cleanup?= Message-ID: <3c8Gfw35yJzSTs@mail.python.org> http://hg.python.org/peps/rev/2797d53b1859 changeset: 5035:2797d53b1859 user: Victor Stinner date: Tue Aug 06 02:18:49 2013 +0200 summary: PEP 446: cleanup files: pep-0446.txt | 163 +++++++++++++++++++------------------- 1 files changed, 81 insertions(+), 82 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -16,10 +16,9 @@ Leaking file descriptors in child processes causes various annoying issues and is a known major security vulnerability. This PEP proposes to make all file descriptors created by Python non-inheritable by default -to have a well defined and portable behaviour and reduce the risk of -these issues. This PEP fixes also a race condition -in multithreaded applications on operating systems supporting atomic -flags to create non-inheritable file descriptors. +to reduces the risk of these issues. This PEP fixes also a race +condition in multithreaded applications on operating systems supporting +atomic flags to create non-inheritable file descriptors. Rationale @@ -30,16 +29,16 @@ Each operating system handles the inheritance of file descriptors differently. Windows creates non-inheritable file descriptors by -default, whereas UNIX creates inheritable file descriptors. Python -prefers the POSIX API over the native Windows API to have a single code -base, and so creates inheritable file descriptors. +default, whereas UNIX creates inheritable file descriptors by default. +Python prefers the POSIX API over the native Windows API to have a +single code base, and so it creates inheritable file descriptors. 
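To make the UNIX default concrete, here is a small POSIX-only sketch of the Python 3.3 status quo, mirroring the ``fcntl()`` C snippet quoted in the previous patch (the temporary path is chosen only for illustration)::

    import fcntl, os

    fd = os.open("/tmp/example", os.O_WRONLY | os.O_CREAT)
    flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    print(bool(flags & fcntl.FD_CLOEXEC))        # False: inherited across exec() by default
    fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)   # opt out by hand, per descriptor
    print(bool(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC))   # True
    os.close(fd)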
There is one exception: ``os.pipe()`` creates non-inheritable pipes on -Windows, whereas it creates inheritable pipes on UNIX. The reason comes -from an implementation artifact: ``os.pipe()`` calls ``CreatePipe()`` on -Windows, whereas it calls ``pipe()`` on UNIX. The call to -``CreatePipe()`` was added in 1994, before the introduction of -``pipe()`` in the POSIX API in Windows 98. The `issue #4708 +Windows, whereas it creates inheritable pipes on UNIX. The reason is an +implementation artifact: ``os.pipe()`` calls ``CreatePipe()`` on Windows +(native API), whereas it calls ``pipe()`` on UNIX (POSIX API). The call +to ``CreatePipe()`` was added in Python in 1994, before the introduction +of ``pipe()`` in the POSIX API in Windows 98. The `issue #4708 `_ proposes to change ``os.pipe()`` on Windows to create inheritable pipes. @@ -51,29 +50,29 @@ ``HANDLE``). These handles have a ``HANDLE_FLAG_INHERIT`` flag which defines if a handle can be inherited in a child process or not. For the POSIX API, the C runtime (CRT) provides also file descriptors (C type -``int``). The handle of a file descriptor can be retrieved using -``_get_osfhandle(fd)``. A file descriptor can be created from a handle -using ``_open_osfhandle(handle)``. +``int``). The handle of a file descriptor can be get using the +function ``_get_osfhandle(fd)``. A file descriptor can be created from a +handle using the function ``_open_osfhandle(handle)``. -Handles are only inherited if their inheritable flag +Using `CreateProcess() +`_, +handles are only inherited if their inheritable flag (``HANDLE_FLAG_INHERIT``) is set and if the ``bInheritHandles`` -parameter of `CreateProcess() -`_ -is ``TRUE``. Using ``CreateProcess()``, all file descriptors except -standard streams (0, 1, 2) are closed in the child process, even if -``bInheritHandles`` is ``TRUE``. Using the ``spawnv()`` function, all -inheritable file descriptors are inherited in the child process. This -function uses the undocumented fields *cbReserved2* and *lpReserved2* of -the `STARTUPINFO +parameter of ``CreateProcess()`` is ``TRUE``; all file descriptors +except standard streams (0, 1, 2) are closed in the child process, even +if ``bInheritHandles`` is ``TRUE``. Using the ``spawnv()`` function, all +inheritable handles and all inheritable file descriptors are inherited +in the child process. This function uses the undocumented fields +*cbReserved2* and *lpReserved2* of the `STARTUPINFO `_ structure to pass an array of file descriptors. -To replace standard streams (stdin, stdout, stderr), the -``STARTF_USESTDHANDLES`` flag must be set in the *dwFlags* field of the -``STARTUPINFO`` structure and the *bInheritHandles* parameter of -``CreateProcess()`` must be set to ``TRUE``. So when at least one -standard stream is replaced, all inheritable handles are inherited by -the child process. +To replace standard streams (stdin, stdout, stderr) using +``CreateProcess()``, the ``STARTF_USESTDHANDLES`` flag must be set in +the *dwFlags* field of the ``STARTUPINFO`` structure and the +*bInheritHandles* parameter of ``CreateProcess()`` must be set to +``TRUE``. So when at least one standard stream is replaced, all +inheritable handles are inherited by the child process. See also: @@ -88,8 +87,8 @@ POSIX provides a *close-on-exec* flag on file descriptors to close automatically a file descriptor when the C function ``execv()`` is -called. File descriptors with the *close-on-exec* flag unset are -inherited in the child process, file descriptros with the flag set are +called. 
File descriptors with the *close-on-exec* flag cleared are +inherited in the child process, file descriptors with the flag set are closed in the child process. The flag can be set in two syscalls (one to get current flags, a second @@ -113,8 +112,8 @@ The *close-on-exec* flag has no effect on ``fork()``: all file descriptors are inherited by the child process. The `Python issue #16500 "Add an atfork module" `_ proposes to -add a new ``atfork`` module to execute code at fork. It may be used to -close automatically file descriptors at fork. +add a new ``atfork`` module to execute code at fork, it may be used to +close automatically file descriptors. Issues with Inheritable File Descriptors @@ -124,15 +123,14 @@ processes are not noticed, because they don't cause major bugs. It does not mean that these bugs must not be fixed. -Two example of common issues with inherited file descriptors: +Two examples of common issues with inherited file descriptors: -* On Windows, a directory cannot be removed until all file handles open - in the directory are closed. It may explain why a temporary directory - cannot be removed. The same issue can be seen with files, except if - the file is temporary and was created with the ``FILE_SHARE_DELETE`` - flag (``O_TEMPORARY`` mode for ``open()``). +* On Windows, a directory cannot be removed before all file handles open + in the directory are closed. The same issue can be seen with files, + except if the file was created with the ``FILE_SHARE_DELETE`` flag + (``O_TEMPORARY`` mode for ``open()``). * If a listening socket is leaked in a child process, the socket address - cannot be reused until the parent and child processes terminated. For + cannot be reused before the parent and child processes terminated. For example, if a web server spawn a new program to handle a process, and the server restarts while the program is not done: the server cannot start because the TCP port is still in use. @@ -146,9 +144,9 @@ An untrusted child process can read sensitive data like passwords and take control of the parent process though leaked file descriptors. It is -for example a known vulnerability to escape from a chroot. With a leaked -listening socket, a child process can accept new connections to read -sensitive data. +for example a way to escape from a chroot. With a leaked listening +socket, a child process can accept new connections to read sensitive +data. Atomic Creation of non-inheritable File Descriptors @@ -156,22 +154,19 @@ In a multithreaded application, a inheritable file descriptor can be created just before a new program is spawn, before the file descriptor -is made non-inheritable. In this case, fhe file descriptor is leaked to +is made non-inheritable. In this case, the file descriptor is leaked to the child process. This race condition could be avoided if the file descriptor is created directly non-inheritable. FreeBSD, Linux, Mac OS X, Windows and many other operating systems support creating non-inheritable file descriptors with the inheritable -flag cleared atomically at the creating of the file descriptor. +flag cleared atomically at the creation of the file descriptor. -On Windows, since at least Windows XP, the `SECURITY_ATTRIBUTES -`_ -structure can be used to clear the ``HANDLE_FLAG_INHERIT`` flag: set -*bInheritHandle* field to ``FALSE``. This structure cannot be used with -sockets: a new ``WSA_FLAG_NO_HANDLE_INHERIT`` flag was added in Windows -7 SP1 and Windows Server 2008 R2 SP1 for ``WSASocket()``. 
If this flag -is used on an older Windows verison (ex: Windows XP SP3), -``WSASocket()`` fails with ``WSAEPROTOTYPE``. +A new ``WSA_FLAG_NO_HANDLE_INHERIT`` flag for ``WSASocket()`` was added +in Windows 7 SP1 and Windows Server 2008 R2 SP1 to create +non-inheritable sockets. If this flag is used on an older Windows +version (ex: Windows XP SP3), ``WSASocket()`` fails with +``WSAEPROTOTYPE``. On UNIX, new flags were added for files and sockets: @@ -221,14 +216,14 @@ Legend: * "Atomic File": first version of the operating system supporting - creating atomatically a non-inheritable file descriptor using + creating atomically a non-inheritable file descriptor using ``open()`` * "Atomic Socket": first version of the operating system supporting - creating atomatically a non-inheritable socket + creating atomically a non-inheritable socket * "X": not supported yet -Status in Python 3.3 +Status of Python 3.3 -------------------- Python 3.3 creates inheritable file descriptors on all platforms, except @@ -239,15 +234,15 @@ ``os.O_CLOEXEC``, ``os.pipe2()`` and ``socket.SOCK_CLOEXEC``. On UNIX, the ``subprocess`` module closes all file descriptors in the -child process, except standard streams (0, 1, 2) and file descriptors of -the *pass_fds* parameter. If the *close_fds* parameter is set to -``False``, all inheritable file descriptors are inherited in the child -process. +child process by default, except standard streams (0, 1, 2) and file +descriptors of the *pass_fds* parameter. If the *close_fds* parameter is +set to ``False``, all inheritable file descriptors are inherited in the +child process. On Windows, the ``subprocess`` closes all handles and file descriptors in the child process by default. If at least one standard stream (stdin, stdout or stderr) is replaced (ex: redirected into a pipe), all -inheritable handles are inherited in the child process +inheritable handles are inherited in the child process. All inheritable file descriptors are inherited by the child process using the functions of the ``os.execv*()`` and ``os.spawn*()`` families. @@ -288,7 +283,7 @@ -------------------------------- The following functions are modified to make newly created file -descriptors as non-inheritable by default: +descriptors non-inheritable by default: * ``asyncore.dispatcher.create_socket()`` * ``io.FileIO`` @@ -306,21 +301,25 @@ * ``socket.socket()`` * ``socket.socket.accept()`` * ``socket.socket.dup()`` - * ``socket.socket.fromfd`` + * ``socket.socket.fromfd()`` * ``socket.socketpair()`` +When available, atomic flags are used to make file descriptors +non-inheritable. The atomicity is not guaranteed because a fallback is +required when atomic flags are not available. + New Functions ------------- * ``os.get_inheritable(fd: int)``: return ``True`` if the file descriptor can be inherited by child processes, ``False`` otherwise. -* ``os.set_inheritable(fd: int, inheritable: bool)``: set the +* ``os.set_inheritable(fd: int, inheritable: bool)``: clear or set the inheritable flag of the specified file descriptor. These new functions are available on all platforms. -On Windows, these functions accept also "file descriptors" of sockets: +On Windows, these functions accept also file descriptors of sockets: the result of ``sockobj.fileno()``. @@ -329,7 +328,7 @@ * On UNIX, subprocess makes file descriptors of the *pass_fds* parameter inheritable. 
The file descriptor is made inheritable in the child - process after the ``fork()`` and before ``execv()``, the inheritable + process after the ``fork()`` and before ``execv()``, so the inheritable flag of file descriptors is unchanged in the parent process. * ``os.dup2(fd, fd2)`` makes *fd2* inheritable if *fd2* is ``0`` @@ -342,23 +341,24 @@ This PEP break applications relying on inheritance of file descriptors. Developers are encouraged to reuse the high-level Python module -``subprocess`` which handle the inheritance of file descriptors in a +``subprocess`` which handles the inheritance of file descriptors in a portable way. Applications using the ``subprocess`` module with the *pass_fds* parameter or using ``os.dup2()`` to redirect standard streams should not be affected. -Python does no more conform to POSIX, since file descriptors are made -non-inheritable by default. Python was not designed to conform to POSIX, -Python is designed to develop portable applications. +Python does no more conform to POSIX, since file descriptors are now +made non-inheritable by default. Python was not designed to conform to +POSIX, but was designed to develop portable applications. -Previous Work -============= +Related Work +============ The programming languages Go, Perl and Ruby make newly created file -descriptors non-inheritable: since Go 1.0, Perl 1.0 and Ruby 2.0. +descriptors non-inheritable by default: since Go 1.0 (2009), Perl 1.0 +(1987) and Ruby 2.0 (2013). The SCons project overrides builtin functions ``file()`` and ``open()`` to make files non-inheritable on Windows: @@ -382,18 +382,17 @@ Functions handling file descriptors should not handle standard streams (file descriptors ``0``, ``1``, ``2``) differently. -This option does not work on Windows. On Windows, -``os.set_inheritable(fd, inheritable)`` (calling -``SetHandleInformation()`` to set or clear ``HANDLE_FLAG_INHERIT`` flag) -on file descriptor ``0`` (stdin), ``1`` (stdout) or ``2`` (stderr) fails -with ``OSError(87, 'invalid argument')``. If ``os.dup2(fd, fd2)`` would -always make *fd2* non-inheritable, the function would raise an exception -when used to redirect standard streams. +This option does not work on Windows. On Windows, calling +``SetHandleInformation()`` to set or clear ``HANDLE_FLAG_INHERIT`` flag +on standard streams (0, 1, 2) fails with the Windows error 87 (invalid +argument). If ``os.dup2(fd, fd2)`` would always make *fd2* +non-inheritable, the function would raise an exception when used to +redirect standard streams. Another option is to add a new *inheritable* parameter to ``os.dup2()``. This PEP has a special-case for ``os.dup2()`` to not break backward -compatibility on applications redirection standard streams before +compatibility on applications redirecting standard streams before calling the C function ``execv()``. Examples in the Python standard library: ``CGIHTTPRequestHandler.run_cgi()`` and ``pty.fork()`` use ``os.dup2()`` to redict stdin, stdout and stderr. 
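A short sketch of how the proposed ``os.get_inheritable()`` / ``os.set_inheritable()`` API reads in practice once the PEP is implemented (the functions did ship in Python 3.4; the file name is invented for the example)::

    import os

    fd = os.open("example.txt", os.O_WRONLY | os.O_CREAT)
    print(os.get_inheritable(fd))     # False: non-inheritable by default under the PEP
    os.set_inheritable(fd, True)      # opt in explicitly before spawning a child
    print(os.get_inheritable(fd))     # True
    os.close(fd)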
-- Repository URL: http://hg.python.org/peps From solipsis at pitrou.net Tue Aug 6 05:48:52 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 06 Aug 2013 05:48:52 +0200 Subject: [Python-checkins] Daily reference leaks (438cdc97d8ee): sum=0 Message-ID: results for 438cdc97d8ee on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogEdzcvo', '-x'] From python-checkins at python.org Tue Aug 6 07:43:32 2013 From: python-checkins at python.org (raymond.hettinger) Date: Tue, 6 Aug 2013 07:43:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Replace_outdated_optimizat?= =?utf-8?q?ion_with_clearer_code_that_compiles_better=2E?= Message-ID: <3c8PsJ4Cgdz7Lkw@mail.python.org> http://hg.python.org/cpython/rev/56bff9a8cfdd changeset: 85048:56bff9a8cfdd user: Raymond Hettinger date: Mon Aug 05 22:24:50 2013 -0700 summary: Replace outdated optimization with clearer code that compiles better. Letting the compiler decide how to optimize the multiply by five gives it the freedom to make better choices for the best technique for a given target machine. For example, GCC on x86_64 produces a little bit better code: Old-way (3 steps with a data dependency between each step): shrq $5, %r13 leaq 1(%rbx,%r13), %rax leaq (%rax,%rbx,4), %rbx New-way (3 steps with no dependency between the first two steps which can be run in parallel): leaq (%rbx,%rbx,4), %rax # i*5 shrq $5, %r13 # perturb >>= PERTURB_SHIFT leaq 1(%r13,%rax), %rbx # 1 + perturb + i*5 files: Objects/setobject.c | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -118,7 +118,7 @@ /* In the loop, key == dummy is by far (factor of 100s) the least likely outcome, so test for that last. */ for (perturb = hash; ; perturb >>= PERTURB_SHIFT) { - i = (i << 2) + i + perturb + 1; + i = i * 5 + perturb + 1; entry = &table[i & mask]; if (entry->key == NULL) { if (freeslot != NULL) @@ -189,7 +189,7 @@ /* In the loop, key == dummy is by far (factor of 100s) the least likely outcome, so test for that last. */ for (perturb = hash; ; perturb >>= PERTURB_SHIFT) { - i = (i << 2) + i + perturb + 1; + i = i * 5 + perturb + 1; entry = &table[i & mask]; if (entry->key == NULL) return freeslot == NULL ? entry : freeslot; @@ -258,7 +258,7 @@ i = (size_t)hash & mask; entry = &table[i]; for (perturb = hash; entry->key != NULL; perturb >>= PERTURB_SHIFT) { - i = (i << 2) + i + perturb + 1; + i = i * 5 + perturb + 1; entry = &table[i & mask]; } so->fill++; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 07:43:33 2013 From: python-checkins at python.org (raymond.hettinger) Date: Tue, 6 Aug 2013 07:43:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Put_the_most_important_and?= =?utf-8?q?_most_frequency_accessed_struct_member_first=2E?= Message-ID: <3c8PsK64lSz7Ll5@mail.python.org> http://hg.python.org/cpython/rev/20557286cc35 changeset: 85049:20557286cc35 user: Raymond Hettinger date: Mon Aug 05 22:43:22 2013 -0700 summary: Put the most important and most frequency accessed struct member first. files: Include/setobject.h | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Include/setobject.h b/Include/setobject.h --- a/Include/setobject.h +++ b/Include/setobject.h @@ -23,8 +23,8 @@ typedef struct { /* Cached hash code of the key. 
*/ + PyObject *key; Py_hash_t hash; - PyObject *key; } setentry; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 10:52:03 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Tue, 6 Aug 2013 10:52:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2314323=3A_Expanded?= =?utf-8?q?_the_number_of_digits_in_the_coefficients_for_the?= Message-ID: <3c8V2q2JsszRFT@mail.python.org> http://hg.python.org/cpython/rev/80e9cb6163b4 changeset: 85050:80e9cb6163b4 user: Serhiy Storchaka date: Tue Aug 06 11:51:23 2013 +0300 summary: Issue #14323: Expanded the number of digits in the coefficients for the RGB -- YIQ conversions so that they match the FCC NTSC versions. files: Doc/whatsnew/3.4.rst | 7 +++++ Lib/colorsys.py | 18 ++++++++++---- Lib/test/test_colorsys.py | 32 +++++++++++++++++++++++--- Misc/NEWS | 3 ++ 4 files changed, 51 insertions(+), 9 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -251,6 +251,13 @@ The module supports new file types: door, event port and whiteout. +colorsys +-------- + +The number of digits in the coefficients for the RGB --- YIQ conversions have +been expanded so that they match the FCC NTSC versions. The change in +results should be less than 1% and may better match results found elsewhere. + Optimizations ============= diff --git a/Lib/colorsys.py b/Lib/colorsys.py --- a/Lib/colorsys.py +++ b/Lib/colorsys.py @@ -33,17 +33,25 @@ # YIQ: used by composite video signals (linear combinations of RGB) # Y: perceived grey level (0.0 == black, 1.0 == white) # I, Q: color components +# +# There are a great many versions of the constants used in these formulae. +# The ones in this library uses constants from the FCC version of NTSC. 
def rgb_to_yiq(r, g, b): y = 0.30*r + 0.59*g + 0.11*b - i = 0.60*r - 0.28*g - 0.32*b - q = 0.21*r - 0.52*g + 0.31*b + i = 0.74*(r-y) - 0.27*(b-y) + q = 0.48*(r-y) + 0.41*(b-y) return (y, i, q) def yiq_to_rgb(y, i, q): - r = y + 0.948262*i + 0.624013*q - g = y - 0.276066*i - 0.639810*q - b = y - 1.105450*i + 1.729860*q + # r = y + (0.27*q + 0.41*i) / (0.74*0.41 + 0.27*0.48) + # b = y + (0.74*q - 0.48*i) / (0.74*0.41 + 0.27*0.48) + # g = y - (0.30*(r-y) + 0.11*(b-y)) / 0.59 + + r = y + 0.9468822170900693*i + 0.6235565819861433*q + g = y - 0.27478764629897834*i - 0.6356910791873801*q + b = y - 1.1085450346420322*i + 1.7090069284064666*q + if r < 0.0: r = 0.0 if g < 0.0: diff --git a/Lib/test/test_colorsys.py b/Lib/test/test_colorsys.py --- a/Lib/test/test_colorsys.py +++ b/Lib/test/test_colorsys.py @@ -1,4 +1,4 @@ -import unittest, test.support +import unittest import colorsys def frange(start, stop, step): @@ -69,8 +69,32 @@ self.assertTripleEqual(hls, colorsys.rgb_to_hls(*rgb)) self.assertTripleEqual(rgb, colorsys.hls_to_rgb(*hls)) -def test_main(): - test.support.run_unittest(ColorsysTest) + def test_yiq_roundtrip(self): + for r in frange(0.0, 1.0, 0.2): + for g in frange(0.0, 1.0, 0.2): + for b in frange(0.0, 1.0, 0.2): + rgb = (r, g, b) + self.assertTripleEqual( + rgb, + colorsys.yiq_to_rgb(*colorsys.rgb_to_yiq(*rgb)) + ) + + def test_yiq_values(self): + values = [ + # rgb, yiq + ((0.0, 0.0, 0.0), (0.0, 0.0, 0.0)), # black + ((0.0, 0.0, 1.0), (0.11, -0.3217, 0.3121)), # blue + ((0.0, 1.0, 0.0), (0.59, -0.2773, -0.5251)), # green + ((0.0, 1.0, 1.0), (0.7, -0.599, -0.213)), # cyan + ((1.0, 0.0, 0.0), (0.3, 0.599, 0.213)), # red + ((1.0, 0.0, 1.0), (0.41, 0.2773, 0.5251)), # purple + ((1.0, 1.0, 0.0), (0.89, 0.3217, -0.3121)), # yellow + ((1.0, 1.0, 1.0), (1.0, 0.0, 0.0)), # white + ((0.5, 0.5, 0.5), (0.5, 0.0, 0.0)), # grey + ] + for (rgb, yiq) in values: + self.assertTripleEqual(yiq, colorsys.rgb_to_yiq(*rgb)) + self.assertTripleEqual(rgb, colorsys.yiq_to_rgb(*yiq)) if __name__ == "__main__": - test_main() + unittest.main() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -202,6 +202,9 @@ Library ------- +- Issue #14323: Expanded the number of digits in the coefficients for the + RGB -- YIQ conversions so that they match the FCC NTSC versions. + - Issue #17998: Fix an internal error in regular expression engine. - Issue #17557: Fix os.getgroups() to work with the modified behavior of -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 15:57:04 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Tue, 6 Aug 2013 15:57:04 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE1ODY2?= =?utf-8?q?=3A_The_xmlcharrefreplace_error_handler_no_more_produces_two_XM?= =?utf-8?q?L?= Message-ID: <3c8cpm07v1z7LmD@mail.python.org> http://hg.python.org/cpython/rev/719ee60fc5e2 changeset: 85051:719ee60fc5e2 branch: 2.7 parent: 85041:395ac61ebe1a user: Serhiy Storchaka date: Tue Aug 06 16:56:26 2013 +0300 summary: Issue #15866: The xmlcharrefreplace error handler no more produces two XML entities for a non-BMP character on narrow build. 
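A quick sketch of the behaviour change, using the non-BMP character from the new tests (Python 2.7, narrow/UCS-2 build assumed)::

    >>> u"\U0001f49d".encode("ascii", "xmlcharrefreplace")   # U+1F49D is stored as a surrogate pair
    '&#128157;'
    >>> # before this fix a narrow build emitted one entity per surrogate half:
    >>> # '&#55357;&#56477;'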
files: Lib/test/test_codeccallbacks.py | 25 +++++- Lib/test/test_unicode.py | 12 +++ Misc/NEWS | 3 + Modules/_testcapimodule.c | 2 +- Objects/unicodeobject.c | 82 +++++++++++++++----- Python/codecs.c | 66 +++++++++------- 6 files changed, 135 insertions(+), 55 deletions(-) diff --git a/Lib/test/test_codeccallbacks.py b/Lib/test/test_codeccallbacks.py --- a/Lib/test/test_codeccallbacks.py +++ b/Lib/test/test_codeccallbacks.py @@ -66,15 +66,34 @@ # replace unencodable characters which numeric character entities. # For ascii, latin-1 and charmaps this is completely implemented # in C and should be reasonably fast. - s = u"\u30b9\u30d1\u30e2 \xe4nd eggs" + s = u"\u30b9\u30d1\u30e2 \xe4nd egg\u0161" self.assertEqual( s.encode("ascii", "xmlcharrefreplace"), - "スパモ änd eggs" + "スパモ änd eggš" ) self.assertEqual( s.encode("latin-1", "xmlcharrefreplace"), - "スパモ \xe4nd eggs" + "スパモ \xe4nd eggš" ) + self.assertEqual( + s.encode("iso-8859-15", "xmlcharrefreplace"), + "スパモ \xe4nd egg\xa8" + ) + + def test_xmlcharrefreplace_with_surrogates(self): + tests = [(u'\U0001f49d', '💝'), + (u'\ud83d', '�'), + (u'\udc9d', '�'), + (u'\ud83d\udc9d', '💝' if len(u'\U0001f49d') > 1 else + '��'), + ] + for encoding in ['ascii', 'latin1', 'iso-8859-15']: + for s, exp in tests: + self.assertEqual(s.encode(encoding, 'xmlcharrefreplace'), + exp, msg='%r.encode(%r)' % (s, encoding)) + self.assertEqual((s+'X').encode(encoding, 'xmlcharrefreplace'), + exp+'X', + msg='%r.encode(%r)' % (s + 'X', encoding)) def test_xmlcharnamereplace(self): # This time use a named character entity for unencodable diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -1658,6 +1658,18 @@ self.assertEqual(unicode_encodedecimal(u"123\u20ac\u0660", "replace"), b'123?0') + def test_encode_decimal_with_surrogates(self): + from _testcapi import unicode_encodedecimal + tests = [(u'\U0001f49d', '💝'), + (u'\ud83d', '�'), + (u'\udc9d', '�'), + (u'\ud83d\udc9d', '💝' if len(u'\U0001f49d') > 1 else + '��'), + ] + for s, exp in tests: + self.assertEqual( + unicode_encodedecimal(u"123" + s, "xmlcharrefreplace"), + '123' + exp) def test_main(): test_support.run_unittest(__name__) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -9,6 +9,9 @@ Core and Builtins ----------------- +- Issue #15866: The xmlcharrefreplace error handler no more produces two XML + entities for a non-BMP character on narrow build. + - Issue #18184: PyUnicode_FromFormat() and PyUnicode_FromFormatV() now raise OverflowError when an argument of %c format is out of range. diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -1118,7 +1118,7 @@ if (!PyArg_ParseTuple(args, "u#|s", &unicode, &length, &errors)) return NULL; - decimal_length = length * 7; /* len('€') */ + decimal_length = length * 10; /* len('􏿿') */ decimal = PyBytes_FromStringAndSize(NULL, decimal_length); if (decimal == NULL) return NULL; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -547,6 +547,37 @@ return PyUnicode_FromStringAndSize(u, size); } +/* _Py_UNICODE_NEXT is a private macro used to retrieve the character pointed + * by 'ptr', possibly combining surrogate pairs on narrow builds. + * 'ptr' and 'end' must be Py_UNICODE*, with 'ptr' pointing at the character + * that should be returned and 'end' pointing to the end of the buffer. 
+ * ('end' is used on narrow builds to detect a lone surrogate at the + * end of the buffer that should be returned unchanged.) + * The ptr and end arguments should be side-effect free and ptr must an lvalue. + * The type of the returned char is always Py_UCS4. + * + * Note: the macro advances ptr to next char, so it might have side-effects + * (especially if used with other macros). + */ + +/* helper macros used by _Py_UNICODE_NEXT */ +#define _Py_UNICODE_IS_HIGH_SURROGATE(ch) (0xD800 <= ch && ch <= 0xDBFF) +#define _Py_UNICODE_IS_LOW_SURROGATE(ch) (0xDC00 <= ch && ch <= 0xDFFF) +/* Join two surrogate characters and return a single Py_UCS4 value. */ +#define _Py_UNICODE_JOIN_SURROGATES(high, low) \ + (((((Py_UCS4)(high) & 0x03FF) << 10) | \ + ((Py_UCS4)(low) & 0x03FF)) + 0x10000) + +#ifdef Py_UNICODE_WIDE +#define _Py_UNICODE_NEXT(ptr, end) *(ptr)++ +#else +#define _Py_UNICODE_NEXT(ptr, end) \ + (((_Py_UNICODE_IS_HIGH_SURROGATE(*(ptr)) && (ptr) < (end)) && \ + _Py_UNICODE_IS_LOW_SURROGATE((ptr)[1])) ? \ + ((ptr) += 2,_Py_UNICODE_JOIN_SURROGATES((ptr)[-2], (ptr)[-1])) : \ + (Py_UCS4)*(ptr)++) +#endif + #ifdef HAVE_WCHAR_H #if (Py_UNICODE_SIZE == 2) && defined(SIZEOF_WCHAR_T) && (SIZEOF_WCHAR_T == 4) @@ -3642,26 +3673,22 @@ case 4: /* xmlcharrefreplace */ respos = str-PyString_AS_STRING(res); /* determine replacement size (temporarily (mis)uses p) */ - for (p = collstart, repsize = 0; p < collend; ++p) { - if (*p<10) + for (p = collstart, repsize = 0; p < collend;) { + Py_UCS4 ch = _Py_UNICODE_NEXT(p, collend); + if (ch < 10) repsize += 2+1+1; - else if (*p<100) + else if (ch < 100) repsize += 2+2+1; - else if (*p<1000) + else if (ch < 1000) repsize += 2+3+1; - else if (*p<10000) + else if (ch < 10000) repsize += 2+4+1; -#ifndef Py_UNICODE_WIDE - else + else if (ch < 100000) repsize += 2+5+1; -#else - else if (*p<100000) - repsize += 2+5+1; - else if (*p<1000000) + else if (ch < 1000000) repsize += 2+6+1; else repsize += 2+7+1; -#endif } requiredsize = respos+repsize+(endp-collend); if (requiredsize > ressize) { @@ -3673,8 +3700,9 @@ ressize = requiredsize; } /* generate replacement (temporarily (mis)uses p) */ - for (p = collstart; p < collend; ++p) { - str += sprintf(str, "&#%d;", (int)*p); + for (p = collstart; p < collend;) { + Py_UCS4 ch = _Py_UNICODE_NEXT(p, collend); + str += sprintf(str, "&#%d;", (int)ch); } p = collend; break; @@ -4649,11 +4677,20 @@ *inpos = collendpos; break; case 4: /* xmlcharrefreplace */ - /* generate replacement (temporarily (mis)uses p) */ - for (collpos = collstartpos; collpos < collendpos; ++collpos) { + /* generate replacement */ + for (collpos = collstartpos; collpos < collendpos;) { char buffer[2+29+1+1]; char *cp; - sprintf(buffer, "&#%d;", (int)p[collpos]); + Py_UCS4 ch = p[collpos++]; +#ifndef Py_UNICODE_WIDE + if ((0xD800 <= ch && ch <= 0xDBFF) && + (collpos < collendpos) && + (0xDC00 <= p[collpos] && p[collpos] <= 0xDFFF)) { + ch = ((((ch & 0x03FF) << 10) | + ((Py_UCS4)p[collpos++] & 0x03FF)) + 0x10000); + } +#endif + sprintf(buffer, "&#%d;", (int)ch); for (cp = buffer; *cp; ++cp) { x = charmapencode_output(*cp, mapping, res, respos); if (x==enc_EXCEPTION) @@ -5068,10 +5105,11 @@ break; case 4: /* xmlcharrefreplace */ /* generate replacement (temporarily (mis)uses p) */ - for (p = collstart; p < collend; ++p) { + for (p = collstart; p < collend;) { char buffer[2+29+1+1]; char *cp; - sprintf(buffer, "&#%d;", (int)*p); + Py_UCS4 ch = _Py_UNICODE_NEXT(p, collend); + sprintf(buffer, "&#%d;", (int)ch); if (charmaptranslate_makespace(&res, &str, 
(str-PyUnicode_AS_UNICODE(res))+strlen(buffer)+(endp-collend))) goto onError; @@ -5222,8 +5260,10 @@ break; case 4: /* xmlcharrefreplace */ /* generate replacement (temporarily (mis)uses p) */ - for (p = collstart; p < collend; ++p) - output += sprintf(output, "&#%d;", (int)*p); + for (p = collstart; p < collend;) { + Py_UCS4 ch = _Py_UNICODE_NEXT(p, collend); + output += sprintf(output, "&#%d;", ch); + } p = collend; break; default: diff --git a/Python/codecs.c b/Python/codecs.c --- a/Python/codecs.c +++ b/Python/codecs.c @@ -556,6 +556,7 @@ PyObject *res; Py_UNICODE *p; Py_UNICODE *startp; + Py_UNICODE *e; Py_UNICODE *outp; int ressize; if (PyUnicodeEncodeError_GetStart(exc, &start)) @@ -565,26 +566,31 @@ if (!(object = PyUnicodeEncodeError_GetObject(exc))) return NULL; startp = PyUnicode_AS_UNICODE(object); - for (p = startp+start, ressize = 0; p < startp+end; ++p) { - if (*p<10) + e = startp + end; + for (p = startp+start, ressize = 0; p < e;) { + Py_UCS4 ch = *p++; +#ifndef Py_UNICODE_WIDE + if ((0xD800 <= ch && ch <= 0xDBFF) && + (p < e) && + (0xDC00 <= *p && *p <= 0xDFFF)) { + ch = ((((ch & 0x03FF) << 10) | + ((Py_UCS4)*p++ & 0x03FF)) + 0x10000); + } +#endif + if (ch < 10) ressize += 2+1+1; - else if (*p<100) + else if (ch < 100) ressize += 2+2+1; - else if (*p<1000) + else if (ch < 1000) ressize += 2+3+1; - else if (*p<10000) + else if (ch < 10000) ressize += 2+4+1; -#ifndef Py_UNICODE_WIDE - else + else if (ch < 100000) ressize += 2+5+1; -#else - else if (*p<100000) - ressize += 2+5+1; - else if (*p<1000000) + else if (ch < 1000000) ressize += 2+6+1; else ressize += 2+7+1; -#endif } /* allocate replacement */ res = PyUnicode_FromUnicode(NULL, ressize); @@ -593,40 +599,41 @@ return NULL; } /* generate replacement */ - for (p = startp+start, outp = PyUnicode_AS_UNICODE(res); - p < startp+end; ++p) { - Py_UNICODE c = *p; + for (p = startp+start, outp = PyUnicode_AS_UNICODE(res); p < e;) { int digits; int base; + Py_UCS4 ch = *p++; +#ifndef Py_UNICODE_WIDE + if ((0xD800 <= ch && ch <= 0xDBFF) && + (p < startp+end) && + (0xDC00 <= *p && *p <= 0xDFFF)) { + ch = ((((ch & 0x03FF) << 10) | + ((Py_UCS4)*p++ & 0x03FF)) + 0x10000); + } +#endif *outp++ = '&'; *outp++ = '#'; - if (*p<10) { + if (ch < 10) { digits = 1; base = 1; } - else if (*p<100) { + else if (ch < 100) { digits = 2; base = 10; } - else if (*p<1000) { + else if (ch < 1000) { digits = 3; base = 100; } - else if (*p<10000) { + else if (ch < 10000) { digits = 4; base = 1000; } -#ifndef Py_UNICODE_WIDE - else { + else if (ch < 100000) { digits = 5; base = 10000; } -#else - else if (*p<100000) { - digits = 5; - base = 10000; - } - else if (*p<1000000) { + else if (ch < 1000000) { digits = 6; base = 100000; } @@ -634,10 +641,9 @@ digits = 7; base = 1000000; } -#endif while (digits-->0) { - *outp++ = '0' + c/base; - c %= base; + *outp++ = '0' + ch/base; + ch %= base; base /= 10; } *outp++ = ';'; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 16:03:47 2013 From: python-checkins at python.org (christian.heimes) Date: Tue, 6 Aug 2013 16:03:47 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4MzY4?= =?utf-8?q?=3A_PyOS=5FStdioReadline=28=29_no_longer_leaks_memory_when_real?= =?utf-8?b?bG9jKCkgZmFpbHMu?= Message-ID: <3c8cyW2zHsz7Lm8@mail.python.org> http://hg.python.org/cpython/rev/5859a3ec5b7e changeset: 85052:5859a3ec5b7e branch: 3.3 parent: 85042:791034a0ae1e user: Christian Heimes date: Tue Aug 06 15:59:16 2013 +0200 summary: Issue #18368: 
PyOS_StdioReadline() no longer leaks memory when realloc() fails. files: Misc/NEWS | 3 +++ Parser/myreadline.c | 24 ++++++++++++++++++------ 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -12,6 +12,9 @@ Core and Builtins ----------------- +- Issue #18368: PyOS_StdioReadline() no longer leaks memory when realloc() + fails. + - Issue #16741: Fix an error reporting in int(). - Issue #17899: Fix rare file descriptor leak in os.listdir(). diff --git a/Parser/myreadline.c b/Parser/myreadline.c --- a/Parser/myreadline.c +++ b/Parser/myreadline.c @@ -112,7 +112,7 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, char *prompt) { size_t n; - char *p; + char *p, *pr; n = 100; if ((p = (char *)PyMem_MALLOC(n)) == NULL) return NULL; @@ -135,17 +135,29 @@ n = strlen(p); while (n > 0 && p[n-1] != '\n') { size_t incr = n+2; - p = (char *)PyMem_REALLOC(p, n + incr); - if (p == NULL) + if (incr > INT_MAX) { + PyMem_FREE(p); + PyErr_SetString(PyExc_OverflowError, "input line too long"); return NULL; - if (incr > INT_MAX) { - PyErr_SetString(PyExc_OverflowError, "input line too long"); } + pr = (char *)PyMem_REALLOC(p, n + incr); + if (pr == NULL) { + PyMem_FREE(p); + PyErr_NoMemory(); + return NULL; + } + p = pr; if (my_fgets(p+n, (int)incr, sys_stdin) != 0) break; n += strlen(p+n); } - return (char *)PyMem_REALLOC(p, n+1); + pr = (char *)PyMem_REALLOC(p, n+1); + if (pr == NULL) { + PyMem_FREE(p); + PyErr_NoMemory(); + return NULL; + } + return pr; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 16:03:48 2013 From: python-checkins at python.org (christian.heimes) Date: Tue, 6 Aug 2013 16:03:48 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318368=3A_PyOS=5FStdioReadline=28=29_no_longer_l?= =?utf-8?q?eaks_memory_when_realloc=28=29_fails=2E?= Message-ID: <3c8cyX54C2z7Lm9@mail.python.org> http://hg.python.org/cpython/rev/6dbc4d6ff31e changeset: 85053:6dbc4d6ff31e parent: 85050:80e9cb6163b4 parent: 85052:5859a3ec5b7e user: Christian Heimes date: Tue Aug 06 16:03:33 2013 +0200 summary: Issue #18368: PyOS_StdioReadline() no longer leaks memory when realloc() fails. files: Misc/NEWS | 3 +++ Parser/myreadline.c | 24 ++++++++++++++++++------ 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #18368: PyOS_StdioReadline() no longer leaks memory when realloc() + fail + - Issue #17934: Add a clear() method to frame objects, to help clean up expensive details (local variables) and break reference cycles. 
diff --git a/Parser/myreadline.c b/Parser/myreadline.c --- a/Parser/myreadline.c +++ b/Parser/myreadline.c @@ -112,7 +112,7 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, char *prompt) { size_t n; - char *p; + char *p, *pr; n = 100; if ((p = (char *)PyMem_MALLOC(n)) == NULL) return NULL; @@ -135,17 +135,29 @@ n = strlen(p); while (n > 0 && p[n-1] != '\n') { size_t incr = n+2; - p = (char *)PyMem_REALLOC(p, n + incr); - if (p == NULL) + if (incr > INT_MAX) { + PyMem_FREE(p); + PyErr_SetString(PyExc_OverflowError, "input line too long"); return NULL; - if (incr > INT_MAX) { - PyErr_SetString(PyExc_OverflowError, "input line too long"); } + pr = (char *)PyMem_REALLOC(p, n + incr); + if (pr == NULL) { + PyMem_FREE(p); + PyErr_NoMemory(); + return NULL; + } + p = pr; if (my_fgets(p+n, (int)incr, sys_stdin) != 0) break; n += strlen(p+n); } - return (char *)PyMem_REALLOC(p, n+1); + pr = (char *)PyMem_REALLOC(p, n+1); + if (pr == NULL) { + PyMem_FREE(p); + PyErr_NoMemory(); + return NULL; + } + return pr; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 18:52:37 2013 From: python-checkins at python.org (ezio.melotti) Date: Tue, 6 Aug 2013 18:52:37 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_=2318443=3A_remove_the_Tex?= =?utf-8?q?tMate_entry_now_that_the_bundle_has_been_removed_and_fix?= Message-ID: <3c8hjK6q6Hz7Lm0@mail.python.org> http://hg.python.org/cpython/rev/5c74755e8f04 changeset: 85054:5c74755e8f04 user: Ezio Melotti date: Tue Aug 06 19:52:25 2013 +0300 summary: #18443: remove the TextMate entry now that the bundle has been removed and fix ordering. Patch by F?vry Thibault. files: Misc/README | 3 +-- 1 files changed, 1 insertions(+), 2 deletions(-) diff --git a/Misc/README b/Misc/README --- a/Misc/README +++ b/Misc/README @@ -22,8 +22,7 @@ README.coverity Information about running Coverity's Prevent on Python README.valgrind Information for Valgrind users, see valgrind-python.supp RPM (Old) tools to build RPMs +SpecialBuilds.txt Describes extra symbols you can set for debug builds svnmap.txt Map of old SVN revs and branches to hg changeset ids -SpecialBuilds.txt Describes extra symbols you can set for debug builds -TextMate A TextMate bundle for Python development valgrind-python.supp Valgrind suppression file, see README.valgrind vgrindefs Python configuration for vgrind (a generic pretty printer) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 20:59:03 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 6 Aug 2013 20:59:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbjogSW4gX1B5R0NfRmluaSgpLCBs?= =?utf-8?q?ose_the_reference_that_was_kept_to_the_time_module?= Message-ID: <3c8lWC3TsQz7Lm0@mail.python.org> http://hg.python.org/cpython/rev/ff91c8b9c693 changeset: 85055:ff91c8b9c693 user: Antoine Pitrou date: Tue Aug 06 20:50:48 2013 +0200 summary: In _PyGC_Fini(), lose the reference that was kept to the time module files: Modules/gcmodule.c | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1661,6 +1661,7 @@ _PyGC_Fini(void) { Py_CLEAR(callbacks); + Py_CLEAR(tmod); } /* for debugging */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 22:50:23 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 6 Aug 2013 22:50:23 +0200 (CEST) Subject: [Python-checkins] 
=?utf-8?q?cpython=3A_Improve_verbose_reporting_?= =?utf-8?q?of_shutdown_phase_by_using_the_=22public=22_module_name?= Message-ID: <3c8nzg70m1z7Lld@mail.python.org> http://hg.python.org/cpython/rev/41b1a2bbd3b6 changeset: 85056:41b1a2bbd3b6 user: Antoine Pitrou date: Tue Aug 06 22:50:15 2013 +0200 summary: Improve verbose reporting of shutdown phase by using the "public" module name files: Python/import.c | 13 +++++++------ 1 files changed, 7 insertions(+), 6 deletions(-) diff --git a/Python/import.c b/Python/import.c --- a/Python/import.c +++ b/Python/import.c @@ -345,17 +345,17 @@ for diagnosis messages (in verbose mode), while the weakref helps detect those modules which have been held alive. */ weaklist = PyList_New(0); + if (weaklist == NULL) + PyErr_Clear(); -#define STORE_MODULE_WEAKREF(mod) \ +#define STORE_MODULE_WEAKREF(name, mod) \ if (weaklist != NULL) { \ - PyObject *name = PyModule_GetNameObject(mod); \ PyObject *wr = PyWeakref_NewRef(mod, NULL); \ if (name && wr) { \ PyObject *tup = PyTuple_Pack(2, name, wr); \ PyList_Append(weaklist, tup); \ Py_XDECREF(tup); \ } \ - Py_XDECREF(name); \ Py_XDECREF(wr); \ if (PyErr_Occurred()) \ PyErr_Clear(); \ @@ -368,7 +368,7 @@ if (PyModule_Check(value)) { if (Py_VerboseFlag && PyUnicode_Check(key)) PySys_FormatStderr("# cleanup[2] removing %U\n", key, value); - STORE_MODULE_WEAKREF(value); + STORE_MODULE_WEAKREF(key, value); PyDict_SetItem(modules, key, Py_None); } } @@ -394,14 +394,15 @@ n = PyList_GET_SIZE(weaklist); for (i = 0; i < n; i++) { PyObject *tup = PyList_GET_ITEM(weaklist, i); + PyObject *name = PyTuple_GET_ITEM(tup, 0); PyObject *mod = PyWeakref_GET_OBJECT(PyTuple_GET_ITEM(tup, 1)); if (mod == Py_None) continue; Py_INCREF(mod); assert(PyModule_Check(mod)); - if (Py_VerboseFlag) + if (Py_VerboseFlag && PyUnicode_Check(name)) PySys_FormatStderr("# cleanup[3] wiping %U\n", - PyTuple_GET_ITEM(tup, 0), mod); + name, mod); _PyModule_Clear(mod); Py_DECREF(mod); } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 22:57:39 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 6 Aug 2013 22:57:39 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318621=3A_Prevent_?= =?utf-8?q?the_site_module=27s_patched_builtins_from_keeping_too_many?= Message-ID: <3c8p8315bDz7Lld@mail.python.org> http://hg.python.org/cpython/rev/8584f63e570e changeset: 85057:8584f63e570e user: Antoine Pitrou date: Tue Aug 06 22:56:40 2013 +0200 summary: Issue #18621: Prevent the site module's patched builtins from keeping too many references alive for too long. files: Lib/_sitebuiltins.py | 100 ++++++++++++++++++++++++++++++ Lib/site.py | 101 ++---------------------------- Misc/NEWS | 3 + 3 files changed, 111 insertions(+), 93 deletions(-) diff --git a/Lib/_sitebuiltins.py b/Lib/_sitebuiltins.py new file mode 100644 --- /dev/null +++ b/Lib/_sitebuiltins.py @@ -0,0 +1,100 @@ +""" +The objects used by the site module to add custom builtins. +""" + +# Those objects are almost immortal and they keep a reference to their module +# globals. Defining them in the site module would keep too many references +# alive. +# Note this means this module should also avoid keep things alive in its +# globals. 
+ +import sys + +class Quitter(object): + def __init__(self, name, eof): + self.name = name + self.eof = eof + def __repr__(self): + return 'Use %s() or %s to exit' % (self.name, self.eof) + def __call__(self, code=None): + # Shells like IDLE catch the SystemExit, but listen when their + # stdin wrapper is closed. + try: + sys.stdin.close() + except: + pass + raise SystemExit(code) + + +class _Printer(object): + """interactive prompt objects for printing the license text, a list of + contributors and the copyright notice.""" + + MAXLINES = 23 + + def __init__(self, name, data, files=(), dirs=()): + import os + self.__name = name + self.__data = data + self.__lines = None + self.__filenames = [os.path.join(dir, filename) + for dir in dirs + for filename in files] + + def __setup(self): + if self.__lines: + return + data = None + for filename in self.__filenames: + try: + with open(filename, "r") as fp: + data = fp.read() + break + except OSError: + pass + if not data: + data = self.__data + self.__lines = data.split('\n') + self.__linecnt = len(self.__lines) + + def __repr__(self): + self.__setup() + if len(self.__lines) <= self.MAXLINES: + return "\n".join(self.__lines) + else: + return "Type %s() to see the full %s text" % ((self.__name,)*2) + + def __call__(self): + self.__setup() + prompt = 'Hit Return for more, or q (and Return) to quit: ' + lineno = 0 + while 1: + try: + for i in range(lineno, lineno + self.MAXLINES): + print(self.__lines[i]) + except IndexError: + break + else: + lineno += self.MAXLINES + key = None + while key is None: + key = input(prompt) + if key not in ('', 'q'): + key = None + if key == 'q': + break + + +class _Helper(object): + """Define the builtin 'help'. + This is a wrapper around pydoc.help (with a twist). + + """ + + def __repr__(self): + return "Type help() for interactive help, " \ + "or help(object) for help about object." + def __call__(self, *args, **kwds): + import pydoc + return pydoc.help(*args, **kwds) + diff --git a/Lib/site.py b/Lib/site.py --- a/Lib/site.py +++ b/Lib/site.py @@ -72,6 +72,7 @@ import os import re import builtins +import _sitebuiltins # Prefixes for site-packages; add additional prefixes like /usr/local here PREFIXES = [sys.prefix, sys.exec_prefix] @@ -344,116 +345,30 @@ else: eof = 'Ctrl-D (i.e. EOF)' - class Quitter(object): - def __init__(self, name): - self.name = name - def __repr__(self): - return 'Use %s() or %s to exit' % (self.name, eof) - def __call__(self, code=None): - # Shells like IDLE catch the SystemExit, but listen when their - # stdin wrapper is closed. 
- try: - sys.stdin.close() - except: - pass - raise SystemExit(code) - builtins.quit = Quitter('quit') - builtins.exit = Quitter('exit') + builtins.quit = _sitebuiltins.Quitter('quit', eof) + builtins.exit = _sitebuiltins.Quitter('exit', eof) -class _Printer(object): - """interactive prompt objects for printing the license text, a list of - contributors and the copyright notice.""" - - MAXLINES = 23 - - def __init__(self, name, data, files=(), dirs=()): - self.__name = name - self.__data = data - self.__files = files - self.__dirs = dirs - self.__lines = None - - def __setup(self): - if self.__lines: - return - data = None - for dir in self.__dirs: - for filename in self.__files: - filename = os.path.join(dir, filename) - try: - with open(filename, "r") as fp: - data = fp.read() - break - except OSError: - pass - if data: - break - if not data: - data = self.__data - self.__lines = data.split('\n') - self.__linecnt = len(self.__lines) - - def __repr__(self): - self.__setup() - if len(self.__lines) <= self.MAXLINES: - return "\n".join(self.__lines) - else: - return "Type %s() to see the full %s text" % ((self.__name,)*2) - - def __call__(self): - self.__setup() - prompt = 'Hit Return for more, or q (and Return) to quit: ' - lineno = 0 - while 1: - try: - for i in range(lineno, lineno + self.MAXLINES): - print(self.__lines[i]) - except IndexError: - break - else: - lineno += self.MAXLINES - key = None - while key is None: - key = input(prompt) - if key not in ('', 'q'): - key = None - if key == 'q': - break - def setcopyright(): """Set 'copyright' and 'credits' in builtins""" - builtins.copyright = _Printer("copyright", sys.copyright) + builtins.copyright = _sitebuiltins._Printer("copyright", sys.copyright) if sys.platform[:4] == 'java': - builtins.credits = _Printer( + builtins.credits = _sitebuiltins._Printer( "credits", "Jython is maintained by the Jython developers (www.jython.org).") else: - builtins.credits = _Printer("credits", """\ + builtins.credits = _sitebuiltins._Printer("credits", """\ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands for supporting Python development. See www.python.org for more information.""") here = os.path.dirname(os.__file__) - builtins.license = _Printer( + builtins.license = _sitebuiltins._Printer( "license", "See http://www.python.org/%.3s/license.html" % sys.version, ["LICENSE.txt", "LICENSE"], [os.path.join(here, os.pardir), here, os.curdir]) -class _Helper(object): - """Define the builtin 'help'. - This is a wrapper around pydoc.help (with a twist). - - """ - - def __repr__(self): - return "Type help() for interactive help, " \ - "or help(object) for help about object." - def __call__(self, *args, **kwds): - import pydoc - return pydoc.help(*args, **kwds) - def sethelper(): - builtins.help = _Helper() + builtins.help = _sitebuiltins._Helper() def enablerlcompleter(): """Enable default readline configuration on interactive prompts, by diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -19,6 +19,9 @@ Library ------- +- Issue #18621: Prevent the site module's patched builtins from keeping + too many references alive for too long. + - Issue #4885: Add weakref support to mmap objects. Patch by Valerie Lambert. - Issue #8860: Fixed rounding in timedelta constructor. 
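A note on what "keeping too many references alive" means in practice: every Python function, including the methods of a class, holds a reference to the global namespace of the module that defined it, so the Quitter, _Printer and _Helper instances stored on builtins used to pin site's entire namespace for the lifetime of the interpreter. A minimal sketch of how to observe this (illustration only, not part of the changeset):

    import builtins

    def defining_module(obj):
        # A method's __globals__ is the namespace of the module that defined
        # its class; that whole dict stays alive for as long as obj does.
        return type(obj).__repr__.__globals__.get("__name__")

    # Reports 'site' before this change and '_sitebuiltins' (a deliberately
    # tiny module) after it.
    print(defining_module(builtins.exit))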
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 22:57:40 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 6 Aug 2013 22:57:40 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Normalize_whitespace?= Message-ID: <3c8p842vqZz7Lld@mail.python.org> http://hg.python.org/cpython/rev/ae125a558436 changeset: 85058:ae125a558436 user: Antoine Pitrou date: Tue Aug 06 22:57:31 2013 +0200 summary: Normalize whitespace files: Lib/_sitebuiltins.py | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Lib/_sitebuiltins.py b/Lib/_sitebuiltins.py --- a/Lib/_sitebuiltins.py +++ b/Lib/_sitebuiltins.py @@ -97,4 +97,3 @@ def __call__(self, *args, **kwds): import pydoc return pydoc.help(*args, **kwds) - -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 23:05:32 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 6 Aug 2013 23:05:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318666=3A_improve_?= =?utf-8?q?test=5Fframe_a_bit=2E_Patch_by_Vajrasky_Kok=2E?= Message-ID: <3c8pK80sMFz7LkN@mail.python.org> http://hg.python.org/cpython/rev/34e1ecb8edd2 changeset: 85059:34e1ecb8edd2 user: Antoine Pitrou date: Tue Aug 06 23:05:23 2013 +0200 summary: Issue #18666: improve test_frame a bit. Patch by Vajrasky Kok. files: Lib/test/test_frame.py | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -90,6 +90,9 @@ gen = g() f = next(gen) self.assertFalse(endly) + # Clearing the frame closes the generator + f.clear() + self.assertTrue(endly) @support.cpython_only def test_clear_refcycles(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 6 23:07:09 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 6 Aug 2013 23:07:09 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318665=3A_fix_typo?= =?utf-8?q?s=2E__Patch_by_Vajrasky_Kok=2E?= Message-ID: <3c8pM10ZMMz7LkN@mail.python.org> http://hg.python.org/cpython/rev/89ce323357db changeset: 85060:89ce323357db user: Antoine Pitrou date: Tue Aug 06 23:06:59 2013 +0200 summary: Issue #18665: fix typos. Patch by Vajrasky Kok. files: Include/frameobject.h | 2 +- Lib/test/test_frame.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Include/frameobject.h b/Include/frameobject.h --- a/Include/frameobject.h +++ b/Include/frameobject.h @@ -36,7 +36,7 @@ non-generator frames. See the save_exc_state and swap_exc_state functions in ceval.c for details of their use. 
*/ PyObject *f_exc_type, *f_exc_value, *f_exc_traceback; - /* Borrowed referenced to a generator, or NULL */ + /* Borrowed reference to a generator, or NULL */ PyObject *f_gen; PyThreadState *f_tstate; diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -96,7 +96,7 @@ @support.cpython_only def test_clear_refcycles(self): - # .clear() doesn't leave any refcycle behin + # .clear() doesn't leave any refcycle behind with support.disable_gc(): class C: pass -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 7 01:39:02 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 7 Aug 2013 01:39:02 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_add_a_section_=22P?= =?utf-8?q?erformances_of_Closing_All_File_Descriptors=22?= Message-ID: <3c8skG07SDz7LkN@mail.python.org> http://hg.python.org/peps/rev/3dbfe0c66c7e changeset: 5036:3dbfe0c66c7e user: Victor Stinner date: Wed Aug 07 01:31:07 2013 +0200 summary: PEP 446: add a section "Performances of Closing All File Descriptors" files: pep-0446.txt | 45 +++++++++++++++++++++++++++++++++++---- 1 files changed, 40 insertions(+), 5 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -14,11 +14,15 @@ ======== Leaking file descriptors in child processes causes various annoying -issues and is a known major security vulnerability. This PEP proposes to -make all file descriptors created by Python non-inheritable by default -to reduces the risk of these issues. This PEP fixes also a race -condition in multithreaded applications on operating systems supporting -atomic flags to create non-inheritable file descriptors. +issues and is a known major security vulnerability. Using the +``subprocess`` module with the *close_fds* parameter set to ``True`` is +not possible in some cases, and has poor performances on some platforms. + +This PEP proposes to make all file descriptors created by Python +non-inheritable by default to reduces the risk of these issues. This PEP +fixes also a race condition in multithreaded applications on operating +systems supporting atomic flags to create non-inheritable file +descriptors. Rationale @@ -276,6 +280,27 @@ subprocess are inherited +Performances of Closing All File Descriptors +-------------------------------------------- + +On UNIX, the subprocess module closes almost all file descriptors in the +child process. This operation require MAXFD system calls where MAXFD is +the maximum number of file descriptors, even if there are few open file +descriptors. This maximum can be get using: ``sysconf("SC_OPEN_MAX")``. + +The operation can be slow if MAXFD is large. For example, on a FreeBSD +buildbot with ``MAXFD=655,000``, the operation took 0.3 second: see +`issue #11284: slow close file descriptors +`_). + +On Linux, Python gets the list of all open file descriptors from +``/proc//fd/``, and so performances depends on the number of open +file descriptors, not on MAXFD. + +See also the `issue #1663329: subprocess close_fds perform poor if +SC_OPEN_MAX is high `_. + + Proposal ======== @@ -335,6 +360,16 @@ (stdin), ``1`` (stdout) or ``2`` (stderr) and *fd2* is different than *fd*. +Since Python should only create non-inheritable file descriptors, it is +safe to use subprocess with the *close_fds* parameter set to ``False``. +Not closing explicitly file descriptors is faster, especially on +platform with a large maximum number of file descriptors. 
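As a concrete illustration of the paragraph above (a hedged sketch, not text from the PEP): once descriptors are non-inheritable by default, a child process only receives the standard streams plus whatever is opted in explicitly, so the close-everything pass can be skipped:

    import subprocess
    import sys

    # Safe under the PEP's non-inheritable-by-default rule: no stray
    # descriptors leak into the child, so the costly closing pass is
    # unnecessary.
    subprocess.check_call([sys.executable, "-c", "print('child ran')"],
                          close_fds=False)

    # A descriptor that *should* be inherited is opted in explicitly, e.g.
    # subprocess.Popen(..., pass_fds=(some_fd,)).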
+ +The default value of the *close_fds* parameter is unchanged, because +third party modules, especially extensions implemented in C, may not +conform immediatly to the PEP 446 (still create inheritable file +descriptors). + Backward Compatibility ====================== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Aug 7 01:39:03 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 7 Aug 2013 01:39:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_add_a_reference_to?= =?utf-8?q?_a_previous_attempt_in_2007?= Message-ID: <3c8skH1gxpz7LkN@mail.python.org> http://hg.python.org/peps/rev/0029e8fa9bcd changeset: 5037:0029e8fa9bcd user: Victor Stinner date: Wed Aug 07 01:38:48 2013 +0200 summary: PEP 446: add a reference to a previous attempt in 2007 files: pep-0446.txt | 14 ++++++++++++++ 1 files changed, 14 insertions(+), 0 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -404,6 +404,19 @@ Rejected Alternatives ===================== +Add a new open_noinherit() function +----------------------------------- + +In June 2007, Henning von Bargen proposed on the python-dev mailing list +to add a new open_noinherit() function to fix issues of inherited file +descriptors in child processes. At this time, the default value of the +*close_fds* parameter of the subprocess module was ``False``. + +Read the mail thread: `[Python-Dev] Proposal for a new function +"open_noinherit" to avoid problems with subprocesses and security risks +`_. + + PEP 433 ------- @@ -411,6 +424,7 @@ is a previous attempt proposing various other alternatives, but no consensus could be reached. + No special case for standard streams ------------------------------------ -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Aug 7 01:43:22 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 7 Aug 2013 01:43:22 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_Windows_creates_no?= =?utf-8?q?n-inheritable_*handles*_=28not_fds=29?= Message-ID: <3c8sqG2BPXz7LqV@mail.python.org> http://hg.python.org/peps/rev/36674bbbb2c7 changeset: 5038:36674bbbb2c7 user: Victor Stinner date: Wed Aug 07 01:41:46 2013 +0200 summary: PEP 446: Windows creates non-inheritable *handles* (not fds) files: pep-0446.txt | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -32,10 +32,11 @@ ------------------------------- Each operating system handles the inheritance of file descriptors -differently. Windows creates non-inheritable file descriptors by -default, whereas UNIX creates inheritable file descriptors by default. -Python prefers the POSIX API over the native Windows API to have a -single code base, and so it creates inheritable file descriptors. +differently. Windows creates non-inheritable handles by default, whereas +UNIX and the POSIX API of Windows create inheritable file descriptors by +default. Python prefers the POSIX API over the native Windows API to +have a single code base and to use the same type for file descriptors, +and so it creates inheritable file descriptors. There is one exception: ``os.pipe()`` creates non-inheritable pipes on Windows, whereas it creates inheritable pipes on UNIX. 
The reason is an -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Aug 7 01:43:23 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 7 Aug 2013 01:43:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_complete_the_summa?= =?utf-8?q?ry_table_of_the_status_of_python_3=2E3?= Message-ID: <3c8sqH3kXwz7Lnh@mail.python.org> http://hg.python.org/peps/rev/49a5889049c3 changeset: 5039:49a5889049c3 user: Victor Stinner date: Wed Aug 07 01:43:01 2013 +0200 summary: PEP 446: complete the summary table of the status of python 3.3 files: pep-0446.txt | 3 ++- 1 files changed, 2 insertions(+), 1 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -265,9 +265,10 @@ Module FD on UNIX Handles on Windows FD on Windows =========================== ============= ================== ============= subprocess, default STD, pass_fds none STD +subprocess, replace stdout STD, pass_fds all STD subprocess, close_fds=False all all STD +multiprocessing all all STD os.execv(), os.spawn() all all all -multiprocessing all all STD =========================== ============= ================== ============= Legend: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Aug 7 01:50:08 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 7 Aug 2013 01:50:08 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_cleanup_recent_add?= =?utf-8?q?itions?= Message-ID: <3c8sz46WhCz7Lql@mail.python.org> http://hg.python.org/peps/rev/55e11a621162 changeset: 5040:55e11a621162 user: Victor Stinner date: Wed Aug 07 01:49:59 2013 +0200 summary: PEP 446: cleanup recent additions files: pep-0446.txt | 15 ++++++++------- 1 files changed, 8 insertions(+), 7 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -285,17 +285,18 @@ Performances of Closing All File Descriptors -------------------------------------------- -On UNIX, the subprocess module closes almost all file descriptors in the -child process. This operation require MAXFD system calls where MAXFD is -the maximum number of file descriptors, even if there are few open file -descriptors. This maximum can be get using: ``sysconf("SC_OPEN_MAX")``. +On UNIX, the ``subprocess`` module closes almost all file descriptors in +the child process. This operation require MAXFD system calls, where +MAXFD is the maximum number of file descriptors, even if there are only +few open file descriptors. This maximum can be read using: +``sysconf("SC_OPEN_MAX")``. The operation can be slow if MAXFD is large. For example, on a FreeBSD -buildbot with ``MAXFD=655,000``, the operation took 0.3 second: see +buildbot with ``MAXFD=655,000``, the operation took 300 ms: see `issue #11284: slow close file descriptors -`_). +`_. -On Linux, Python gets the list of all open file descriptors from +On Linux, Python 3.3 gets the list of all open file descriptors from ``/proc//fd/``, and so performances depends on the number of open file descriptors, not on MAXFD. 
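For readers unfamiliar with the MAXFD problem described above, the portable fallback used when /proc/<pid>/fd is not available amounts to the following (illustration only, not code from CPython):

    import os

    MAXFD = os.sysconf("SC_OPEN_MAX")

    def close_all_fds():
        # One close() attempt per *possible* descriptor, even if only a
        # handful are actually open; with MAXFD around 655,000 this adds up.
        for fd in range(3, MAXFD):
            try:
                os.close(fd)
            except OSError:
                pass  # almost every call fails with EBADF, pure overhead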
-- Repository URL: http://hg.python.org/peps From solipsis at pitrou.net Wed Aug 7 05:48:08 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 07 Aug 2013 05:48:08 +0200 Subject: [Python-checkins] Daily reference leaks (89ce323357db): sum=-1 Message-ID: results for 89ce323357db on branch "default" -------------------------------------------- test_support leaked [0, 0, -1] references, sum=-1 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/refloge19hJ2', '-x'] From python-checkins at python.org Wed Aug 7 14:54:58 2013 From: python-checkins at python.org (eli.bendersky) Date: Wed, 7 Aug 2013 14:54:58 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NjY4?= =?utf-8?q?=3A_Properly_document_setting_m=5Fsize_in_PyModuleDef?= Message-ID: <3c9CNf28M8z7Lr1@mail.python.org> http://hg.python.org/cpython/rev/698fd628b001 changeset: 85061:698fd628b001 branch: 3.3 parent: 85052:5859a3ec5b7e user: Eli Bendersky date: Wed Aug 07 05:52:20 2013 -0700 summary: Issue #18668: Properly document setting m_size in PyModuleDef files: Doc/c-api/module.rst | 14 ++++++++++---- 1 files changed, 10 insertions(+), 4 deletions(-) diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -182,16 +182,22 @@ .. c:member:: Py_ssize_t m_size - If the module object needs additional memory, this should be set to the - number of bytes to allocate; a pointer to the block of memory can be - retrieved with :c:func:`PyModule_GetState`. If no memory is needed, set - this to ``-1``. + Some modules allow re-initialization (calling their ``PyInit_*`` function + more than once). These modules should keep their state in a per-module + memory area that can be retrieved with :c:func:`PyModule_GetState`. This memory should be used, rather than static globals, to hold per-module state, since it is then safe for use in multiple sub-interpreters. It is freed when the module object is deallocated, after the :c:member:`m_free` function has been called, if present. + Setting ``m_size`` to a positive value specifies the size of the additional + memory required by the module. Setting it to ``-1`` means that the module can + not be re-initialized because it has global state. Setting it to ``0`` is + forbidden. + + See :PEP:`3121` for more details. + .. c:member:: PyMethodDef* m_methods A pointer to a table of module-level functions, described by -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 7 14:54:59 2013 From: python-checkins at python.org (eli.bendersky) Date: Wed, 7 Aug 2013 14:54:59 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Closing_=2318668=3A_Properly_document_setting_m=5Fsize_i?= =?utf-8?q?n_PyModuleDef?= Message-ID: <3c9CNg47HJz7Lm6@mail.python.org> http://hg.python.org/cpython/rev/9877c25d9556 changeset: 85062:9877c25d9556 parent: 85060:89ce323357db parent: 85061:698fd628b001 user: Eli Bendersky date: Wed Aug 07 05:54:28 2013 -0700 summary: Closing #18668: Properly document setting m_size in PyModuleDef files: Doc/c-api/module.rst | 14 ++++++++++---- 1 files changed, 10 insertions(+), 4 deletions(-) diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -189,16 +189,22 @@ .. 
c:member:: Py_ssize_t m_size - If the module object needs additional memory, this should be set to the - number of bytes to allocate; a pointer to the block of memory can be - retrieved with :c:func:`PyModule_GetState`. If no memory is needed, set - this to ``-1``. + Some modules allow re-initialization (calling their ``PyInit_*`` function + more than once). These modules should keep their state in a per-module + memory area that can be retrieved with :c:func:`PyModule_GetState`. This memory should be used, rather than static globals, to hold per-module state, since it is then safe for use in multiple sub-interpreters. It is freed when the module object is deallocated, after the :c:member:`m_free` function has been called, if present. + Setting ``m_size`` to a positive value specifies the size of the additional + memory required by the module. Setting it to ``-1`` means that the module can + not be re-initialized because it has global state. Setting it to ``0`` is + forbidden. + + See :PEP:`3121` for more details. + .. c:member:: PyMethodDef* m_methods A pointer to a table of module-level functions, described by -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 7 16:14:59 2013 From: python-checkins at python.org (brett.cannon) Date: Wed, 7 Aug 2013 16:14:59 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?devinabox=3A_Note_that_the_code_is_Py?= =?utf-8?q?thon_2/3_compatible?= Message-ID: <3c9F8z0Dwvz7LtQ@mail.python.org> http://hg.python.org/devinabox/rev/77f94e1cf427 changeset: 53:77f94e1cf427 user: Brett Cannon date: Tue Aug 06 11:42:51 2013 -0400 summary: Note that the code is Python 2/3 compatible files: build_cpython.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/build_cpython.py b/build_cpython.py --- a/build_cpython.py +++ b/build_cpython.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# Source is Python 2/3 compatible. """Build CPython on UNIX. On all platforms, return the path to the executable. -- Repository URL: http://hg.python.org/devinabox From python-checkins at python.org Wed Aug 7 16:15:00 2013 From: python-checkins at python.org (brett.cannon) Date: Wed, 7 Aug 2013 16:15:00 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?devinabox=3A_Ignore_expected_download?= =?utf-8?q?ed_files_and_venvs?= Message-ID: <3c9F901vq5z7LtQ@mail.python.org> http://hg.python.org/devinabox/rev/9927d1243787 changeset: 54:9927d1243787 user: Brett Cannon date: Wed Aug 07 10:14:24 2013 -0400 summary: Ignore expected downloaded files and venvs files: .hgignore | 12 +++++++++--- 1 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -5,10 +5,16 @@ cpython devguide peps -# Unversioned downloads -Visual C++ Express/vc_web.exe + +# Downloads +coverage-* +mercurial-* +setuptools-* +tortoisehg-* + # Generated coverage_report -original_coverage_report +venv + # Misc __pycache__ -- Repository URL: http://hg.python.org/devinabox From python-checkins at python.org Wed Aug 7 16:15:01 2013 From: python-checkins at python.org (brett.cannon) Date: Wed, 7 Aug 2013 16:15:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?devinabox=3A_Update_the_README_on_how?= =?utf-8?q?_to_generate_a_coverage_report=2E_Also_negates?= Message-ID: <3c9F914djsz7LtT@mail.python.org> http://hg.python.org/devinabox/rev/cb783f26d952 changeset: 55:cb783f26d952 user: Brett Cannon date: Wed Aug 07 10:14:47 2013 -0400 summary: Update the README on how to generate a coverage report. 
Also negates the need for full_coverage.py. files: README | 103 +++++++++++++++----------- full_coverage.py | 134 ----------------------------------- 2 files changed, 59 insertions(+), 178 deletions(-) diff --git a/README b/README --- a/README +++ b/README @@ -21,7 +21,7 @@ When you are done you should have in this directory everything someone needs to contribute. Simply copy the whole directory to some sort of -media (USB 3 thumb drive and CD tend to work well) and then pass it around for +media (USB 3 drive and a CD tend to work well) and then pass it around for people to copy somewhere on to their system. They can run ``hg pull -u`` to get updates, sparing the probably taxed internet connection at the sprint from doing complete repository cloning. @@ -30,6 +30,11 @@ clones is a handy way to ensure old build artifacts have been removed. You will need to enable the purge extension in ``~/.hgrc``. +Also make sure to not simply copy your own repositories to the box! Otherwise +the clones will most likely have paths which use SSH and the hg account on +hg.python.org which only core developers can use. It's just easier to make the +clones from scratch. + Mercurial --------- @@ -68,7 +73,7 @@ CPython ------- -Clone the CPython repository and build it (you will be cleaning up your build +Clone the `CPython repository`_ and build it (you will be cleaning up your build later, though as a final step). Also make sure to build the documentation. This not only alleviates the need for @@ -78,55 +83,68 @@ being built by a repository build of CPython (and thus Python 3) this may no longer hold true. +.. _CPython repository: http://hg.python.org/cpython + PEPs ---- -Clone the repository and build it. That way if people need to reference a PEP -they can easily find itand will be able to use the easier-to-read HTML version. +Clone the `PEP repository`_ and build it. That way if people need to reference a +PEP they can easily find itand will be able to use the easier-to-read HTML +version. No specific guidelines for building the PEPs are provided for new contributors since there is only a slim chance they will be editing a PEP, and if they are then they should be able to figure out how to get the PEPs to build on their own. +.. _PEP repository: http://hg.python.org/peps + Devguide -------- -Clone the repository and build it. This gives people a local copy to use -rather than having to use the (probably slow) internet connection at the sprint. +Clone the `devguide repository`_ and build it. This gives people a local copy to +use rather than having to use the (probably slow) internet connection at the +sprint. If a new contributor needs to be able to build the devguide, they should only need to set their ``PYTHONPATH`` to point at the ``cpython/Doc/tools`` directory in the CPython repository thanks to the requisite projects being pulled in when you built the CPython documentation. +.. _devguide repository: http://hg.python.org/devguide + Coverage.py ----------- -01. Build the CPython repository -02. Clone the repository from https://bitbucket.org/ned/coveragepy -03. Clone setuptools from https://bitbucket.org/pypa/setuptools/ -04. Run ``python3 setup.py build`` in the ``setuptools`` directory -05. Run ``ln -s ../setuptools/build/lib/setuptools`` in the ``coveragepy`` - directory -06. Run ``ln -s ../setuptools/build/lib/pkg_resources.py`` in the - ``coveragepy`` directory -07. Run ``./cpython/python full_coverage.py build`` -08. Run ``./cpython/python full_coverage.py run`` -09. 
Run ``./cpython/python full_coverage.py html original_coverage_report`` -10. Run ``hg purge --all`` in the CPython repository +#. Download setuptools_ +#. Download coverage_ +#. Build CPython: ``./build_cpython.py`` +#. Create an venv: ``./cpython/python.exe -m venv venv`` +#. Extract setuptools and coverage: ``tar -x -f setuptools-*.tar.gz; tar -x -f coverage-*.tar.gz`` +#. Install setuptools in the venv: ``../venv/bin/python3 setup.py install`` +#. Install coverage in the venv +#. Set PYTHONPATH to ``fullcoverage`` (will need to change the directory): ``export PYTHONPATH=../coverage-N.N/coverage/fullcoverage`` +#. Run coverage from the venv: ``./bin/python -m coverage run --pylib -m test`` +#. Unset PYTHONPATH: ``unset PYTHONPATH`` +#. Generate coverage report: ``./bin/python -m coverage html --directory=../coverage_report -i --include="../cpython/Lib/*" --title="CPython test coverage report"`` +#. Delete project directories (but not tar files!) for coverage and setuptools +#. Delete venv +#. Clean up the cpython clone: either ``make distclean`` or check it out again +Do be aware that this step takes a few **hours**. If you find report generation +is the bottleneck (typically because of memory pressure), you can generate the +HTML reports in chunks at the cost of not having a comprehensive index. E.g. to +report for every module/package starting with the letter 'a':: -All these steps will generate a complete coverage report for the standard -library and put it in the ``original_coverage_report`` directory. Do note that -the location is **not** the default one for the script to prevent users from -accidentally overwriting the original copy (and thus needing to run the whole -coverage again from scratch). + ./bin/python3 -m coverage html --directory ../coverage_report -i ../cpython/Lib/a*.py ../cpython/Lib/a*/*.py ../cpython/Lib/a*/*/*.py -Do be aware that this step takes a few **hours**. +You can then create an index using the textual report from coverage.py. + +.. _setuptools: https://pypi.python.org/pypi/setuptools +.. _coverage: https://pypi.python.org/pypi/coverage Included files to help out @@ -136,6 +154,23 @@ both you and the new contributors. +``index.html`` +-------------- + +An HTML file with links to the various pieces of documentation you built +previously and the helper scripts. + + +``build_cpython.py`` +-------------------- +On UNIX-based OSs it builds the CPython repository. On all platforms it +verifies that the expected CPython binary exists. + +While the devguide includes instructions on how to build under UNIX, the script +just simplifies this by having a single command subsume both the configure and +build steps. It also uses reasonable defaults (e.g. all cores on the CPU). + + ``full_coverage.py`` --------------------- @@ -144,23 +179,3 @@ tests as an argument. The ``html`` directory can take an argument for a directory to write to, but the default should not conflict with the original coverage run you did earlier (if you followed the directions =) . - - -``build_cpython.py`` --------------------- -On UNIX-based OSs, builds the CPython repository, and on all platforms it -verifies that the expected CPython binary exists. - -While the devguide includes instructions on how to build under UNIX, the script -just simplifies this by having a single command subsume both the configure and -build steps. It also uses reasonable defaults (e.g. all cores on the CPU). 
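The build helper described above boils down to something like the following (a hypothetical sketch, not the actual devinabox script; the repository directory name is an assumption):

    import multiprocessing
    import os
    import subprocess

    def build_cpython(repo_dir="cpython"):
        # Configure once, then build with one make job per CPU core.
        cores = multiprocessing.cpu_count()
        subprocess.check_call(["./configure"], cwd=repo_dir)
        subprocess.check_call(["make", "-j", str(cores)], cwd=repo_dir)
        return os.path.join(repo_dir, "python")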
- -(You may need to cd into the CPython directory and run ``make`` to get the -extension modules to build) - - -``index.html`` --------------- - -An HTML file with links to the various pieces of documentation you built -previously and the helper scripts. diff --git a/full_coverage.py b/full_coverage.py deleted file mode 100644 --- a/full_coverage.py +++ /dev/null @@ -1,134 +0,0 @@ -"""Use coverage.py on CPython's standard library. - -See the ``-h`` or ``help`` command of the script for instructions on use. - -""" -import importlib.machinery -import contextlib -import os -import shutil -import subprocess -import sys -import webbrowser - - -def path_from_here(path): - """Calculate the absolute path to 'path' from where this file is located.""" - return os.path.abspath(os.path.join(os.path.dirname(__file__), path)) - -COVERAGE = path_from_here('coveragepy') -REPORT = path_from_here('coverage_report') -CPYTHON = os.path.dirname(sys.executable) - - - at contextlib.contextmanager -def chdir(directory): - """Change the directory temporarily.""" - original_directory = os.getcwd() - os.chdir(directory) - yield - os.chdir(original_directory) - -def make_setup_cmd(*args): - # Silence a spurious warning from setuptools - # See https://bitbucket.org/pypa/setuptools/issue/29/ - cmd = [sys.executable, '-W', 'ignore::UserWarning:distutils.dist', - 'setup.py'] - cmd.extend(args) - return cmd - -def build(args): - """Build coverage.py's C-based tracer. - - Make sure to delete any pre-existing build to make sure it uses the latest - source from CPython. - """ - with chdir(COVERAGE): - for ext in importlib.machinery.EXTENSION_SUFFIXES: - tracer_path = os.path.join('coverage', 'tracer' + ext) - try: - os.unlink(tracer_path) - except FileNotFoundError: - pass - subprocess.check_call(make_setup_cmd('clean')) - env = os.environ.copy() - env['CPPFLAGS'] = '-I {} -I {}'.format(CPYTHON, - os.path.join(CPYTHON, 'Include')) - command = make_setup_cmd('build_ext', '--inplace') - process = subprocess.Popen(command, env=env) - process.wait() - - -def run(args): - """Run coverage.py against Python's stdlib as best as possible. - - If any specific tests are listed, then limit the run to those tests. - """ - command = [sys.executable, COVERAGE, 'run', '--pylib', 'Lib/test/regrtest.py'] - if args.tests: - command.extend(args.tests) - with chdir(CPYTHON): - try: - os.unlink(os.path.join(CPYTHON, '.coverage')) - except FileNotFoundError: - pass - pythonpath = os.path.join(COVERAGE, 'coverage', 'fullcoverage') - process = subprocess.Popen(command, env={'PYTHONPATH': pythonpath}) - process.wait() - - -def report(args): - """Generate the HTML-based coverage report. - - Write the results to either REPORT or a user-specified location. 
- """ - report = os.path.abspath(args.directory) - title = '{} {} test coverage'.format(sys.implementation.name, - sys.version.partition(' \n')[0]) - if os.path.exists(report): - shutil.rmtree(report) - with chdir(CPYTHON): - subprocess.check_call([sys.executable, COVERAGE, 'html', '-i', - '-d', report, '--omit', 'Lib/test/*', - '--title', title]) - print(os.path.join(report, 'index.html')) - - -if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser() - subparsers = parser.add_subparsers(dest='subparser_name', - help="use coverage.py on Python's standard library") - - build_parser = subparsers.add_parser('build', - help='build coverage.py using {}'.format(sys.executable)) - build_parser.set_defaults(func=build) - - stdlib_path = os.path.join(CPYTHON, 'Lib') - run_parser = subparsers.add_parser('run', - help='run coverage.py over the standard library at {} ' - '(coverage.py must already be built)'.format(stdlib_path)) - run_parser.add_argument('tests', action='store', nargs='*', - help='optional list of tests to run (default: all tests)') - run_parser.set_defaults(func=run) - - report_parser = subparsers.add_parser('html', - help='generate an HTML coverage report') - report_parser.add_argument('directory', - help='where to save the report (default: {})'.format(REPORT), - nargs='?', action='store', default=REPORT) - report_parser.set_defaults(func=report) - - help_parser = subparsers.add_parser('help', - help='show the help message for the specified command') - help_parser.add_argument('command', nargs='?', - help='for which command to show a help message') - - args = parser.parse_args() - if args.subparser_name != 'help': - args.func(args) - else: - help_args = ['-h'] - if args.command: - help_args.insert(0, args.command) - parser.parse_args(help_args) -- Repository URL: http://hg.python.org/devinabox From solipsis at pitrou.net Thu Aug 8 05:47:57 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Thu, 08 Aug 2013 05:47:57 +0200 Subject: [Python-checkins] Daily reference leaks (9877c25d9556): sum=0 Message-ID: results for 9877c25d9556 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogwoU4my', '-x'] From python-checkins at python.org Thu Aug 8 09:23:03 2013 From: python-checkins at python.org (larry.hastings) Date: Thu, 8 Aug 2013 09:23:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2315301=3A_Parsing_?= =?utf-8?q?fd=2C_uid=2C_and_gid_parameters_for_builtins?= Message-ID: <3c9gzC3tw1z7M3r@mail.python.org> http://hg.python.org/cpython/rev/f871f8662509 changeset: 85063:f871f8662509 user: Larry Hastings date: Thu Aug 08 00:19:50 2013 -0700 summary: Issue #15301: Parsing fd, uid, and gid parameters for builtins in Modules/posixmodule.c is now far more robust. 
files: Lib/test/test_os.py | 12 + Misc/NEWS | 5 +- Modules/posixmodule.c | 281 ++++++++++++++++++++--------- 3 files changed, 211 insertions(+), 87 deletions(-) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -24,6 +24,8 @@ import stat import locale import codecs +import decimal +import fractions try: import threading except ImportError: @@ -865,6 +867,16 @@ os.makedirs(path, mode=mode, exist_ok=True) os.umask(old_mask) + def test_chown_uid_gid_arguments_must_be_index(self): + stat = os.stat(support.TESTFN) + uid = stat.st_uid + gid = stat.st_gid + for value in (-1.0, -1j, decimal.Decimal(-1), fractions.Fraction(-2, 2)): + self.assertRaises(TypeError, os.chown, support.TESTFN, value, gid) + self.assertRaises(TypeError, os.chown, support.TESTFN, uid, value) + self.assertIsNone(os.chown(support.TESTFN, uid, gid)) + self.assertIsNone(os.chown(support.TESTFN, -1, -1)) + def test_exist_ok_s_isgid_directory(self): path = os.path.join(support.TESTFN, 'dir1') S_ISGID = stat.S_ISGID diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,8 +10,11 @@ Core and Builtins ----------------- +- Issue #15301: Parsing fd, uid, and gid parameters for builtins + in Modules/posixmodule.c is now far more robust. + - Issue #18368: PyOS_StdioReadline() no longer leaks memory when realloc() - fail + fail. - Issue #17934: Add a clear() method to frame objects, to help clean up expensive details (local variables) and break reference cycles. diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -417,108 +417,213 @@ int _Py_Uid_Converter(PyObject *obj, void *p) { + uid_t uid; + PyObject *index; int overflow; long result; - if (PyFloat_Check(obj)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float"); + unsigned long uresult; + + index = PyNumber_Index(obj); + if (index == NULL) { + PyErr_Format(PyExc_TypeError, + "uid should be integer, not %.200s", + Py_TYPE(obj)->tp_name); return 0; } - result = PyLong_AsLongAndOverflow(obj, &overflow); + + /* + * Handling uid_t is complicated for two reasons: + * * Although uid_t is (always?) unsigned, it still + * accepts -1. + * * We don't know its size in advance--it may be + * bigger than an int, or it may be smaller than + * a long. + * + * So a bit of defensive programming is in order. + * Start with interpreting the value passed + * in as a signed long and see if it works. + */ + + result = PyLong_AsLongAndOverflow(index, &overflow); + + if (!overflow) { + uid = (uid_t)result; + + if (result == -1) { + if (PyErr_Occurred()) + goto fail; + /* It's a legitimate -1, we're done. */ + goto success; + } + + /* Any other negative number is disallowed. */ + if (result < 0) + goto underflow; + + /* Ensure the value wasn't truncated. 
*/ + if (sizeof(uid_t) < sizeof(long) && + (long)uid != result) + goto underflow; + goto success; + } + if (overflow < 0) - goto OverflowDown; - if (!overflow && result == -1) { - /* error or -1 */ - if (PyErr_Occurred()) - return 0; - *(uid_t *)p = (uid_t)-1; - } - else { - /* unsigned uid_t */ - unsigned long uresult; - if (overflow > 0) { - uresult = PyLong_AsUnsignedLong(obj); - if (PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_OverflowError)) - goto OverflowUp; - return 0; - } - if ((uid_t)uresult == (uid_t)-1) - goto OverflowUp; - } else { - if (result < 0) - goto OverflowDown; - uresult = result; - } - if (sizeof(uid_t) < sizeof(long) && - (unsigned long)(uid_t)uresult != uresult) - goto OverflowUp; - *(uid_t *)p = (uid_t)uresult; - } + goto underflow; + + /* + * Okay, the value overflowed a signed long. If it + * fits in an *unsigned* long, it may still be okay, + * as uid_t may be unsigned long on this platform. + */ + uresult = PyLong_AsUnsignedLong(index); + if (PyErr_Occurred()) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) + goto overflow; + goto fail; + } + + uid = (uid_t)uresult; + + /* + * If uid == (uid_t)-1, the user actually passed in ULONG_MAX, + * but this value would get interpreted as (uid_t)-1 by chown + * and its siblings. That's not what the user meant! So we + * throw an overflow exception instead. (We already + * handled a real -1 with PyLong_AsLongAndOverflow() above.) + */ + if (uid == (uid_t)-1) + goto overflow; + + /* Ensure the value wasn't truncated. */ + if (sizeof(uid_t) < sizeof(long) && + (unsigned long)uid != uresult) + goto overflow; + /* fallthrough */ + +success: + Py_DECREF(index); + *(uid_t *)p = uid; return 1; -OverflowDown: +underflow: PyErr_SetString(PyExc_OverflowError, - "user id is less than minimum"); - return 0; - -OverflowUp: + "uid is less than minimum"); + goto fail; + +overflow: PyErr_SetString(PyExc_OverflowError, - "user id is greater than maximum"); + "uid is greater than maximum"); + /* fallthrough */ + +fail: + Py_DECREF(index); return 0; } int _Py_Gid_Converter(PyObject *obj, void *p) { + gid_t gid; + PyObject *index; int overflow; long result; - if (PyFloat_Check(obj)) { - PyErr_SetString(PyExc_TypeError, - "integer argument expected, got float"); + unsigned long uresult; + + index = PyNumber_Index(obj); + if (index == NULL) { + PyErr_Format(PyExc_TypeError, + "gid should be integer, not %.200s", + Py_TYPE(obj)->tp_name); return 0; } - result = PyLong_AsLongAndOverflow(obj, &overflow); + + /* + * Handling gid_t is complicated for two reasons: + * * Although gid_t is (always?) unsigned, it still + * accepts -1. + * * We don't know its size in advance--it may be + * bigger than an int, or it may be smaller than + * a long. + * + * So a bit of defensive programming is in order. + * Start with interpreting the value passed + * in as a signed long and see if it works. + */ + + result = PyLong_AsLongAndOverflow(index, &overflow); + + if (!overflow) { + gid = (gid_t)result; + + if (result == -1) { + if (PyErr_Occurred()) + goto fail; + /* It's a legitimate -1, we're done. */ + goto success; + } + + /* Any other negative number is disallowed. */ + if (result < 0) { + goto underflow; + } + + /* Ensure the value wasn't truncated. 
*/ + if (sizeof(gid_t) < sizeof(long) && + (long)gid != result) + goto underflow; + goto success; + } + if (overflow < 0) - goto OverflowDown; - if (!overflow && result == -1) { - /* error or -1 */ - if (PyErr_Occurred()) - return 0; - *(gid_t *)p = (gid_t)-1; - } - else { - /* unsigned gid_t */ - unsigned long uresult; - if (overflow > 0) { - uresult = PyLong_AsUnsignedLong(obj); - if (PyErr_Occurred()) { - if (PyErr_ExceptionMatches(PyExc_OverflowError)) - goto OverflowUp; - return 0; - } - if ((gid_t)uresult == (gid_t)-1) - goto OverflowUp; - } else { - if (result < 0) - goto OverflowDown; - uresult = result; - } - if (sizeof(gid_t) < sizeof(long) && - (unsigned long)(gid_t)uresult != uresult) - goto OverflowUp; - *(gid_t *)p = (gid_t)uresult; - } + goto underflow; + + /* + * Okay, the value overflowed a signed long. If it + * fits in an *unsigned* long, it may still be okay, + * as gid_t may be unsigned long on this platform. + */ + uresult = PyLong_AsUnsignedLong(index); + if (PyErr_Occurred()) { + if (PyErr_ExceptionMatches(PyExc_OverflowError)) + goto overflow; + goto fail; + } + + gid = (gid_t)uresult; + + /* + * If gid == (gid_t)-1, the user actually passed in ULONG_MAX, + * but this value would get interpreted as (gid_t)-1 by chown + * and its siblings. That's not what the user meant! So we + * throw an overflow exception instead. (We already + * handled a real -1 with PyLong_AsLongAndOverflow() above.) + */ + if (gid == (gid_t)-1) + goto overflow; + + /* Ensure the value wasn't truncated. */ + if (sizeof(gid_t) < sizeof(long) && + (unsigned long)gid != uresult) + goto overflow; + /* fallthrough */ + +success: + Py_DECREF(index); + *(gid_t *)p = gid; return 1; -OverflowDown: +underflow: PyErr_SetString(PyExc_OverflowError, - "group id is less than minimum"); - return 0; - -OverflowUp: + "gid is less than minimum"); + goto fail; + +overflow: PyErr_SetString(PyExc_OverflowError, - "group id is greater than maximum"); + "gid is greater than maximum"); + /* fallthrough */ + +fail: + Py_DECREF(index); return 0; } #endif /* MS_WINDOWS */ @@ -541,25 +646,29 @@ _fd_converter(PyObject *o, int *p, const char *allowed) { int overflow; - long long_value = PyLong_AsLongAndOverflow(o, &overflow); - if (PyFloat_Check(o) || - (long_value == -1 && !overflow && PyErr_Occurred())) { - PyErr_Clear(); + long long_value; + + PyObject *index = PyNumber_Index(o); + if (index == NULL) { PyErr_Format(PyExc_TypeError, - "argument should be %s, not %.200s", - allowed, Py_TYPE(o)->tp_name); + "argument should be %s, not %.200s", + allowed, Py_TYPE(o)->tp_name); return 0; } + + long_value = PyLong_AsLongAndOverflow(index, &overflow); + Py_DECREF(index); if (overflow > 0 || long_value > INT_MAX) { PyErr_SetString(PyExc_OverflowError, - "signed integer is greater than maximum"); + "fd is greater than maximum"); return 0; } if (overflow < 0 || long_value < INT_MIN) { PyErr_SetString(PyExc_OverflowError, - "signed integer is less than minimum"); + "fd is less than minimum"); return 0; } + *p = (int)long_value; return 1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 14:31:48 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 14:31:48 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4MjczOiBtb3Zl?= =?utf-8?q?_the_tests_in_Lib/test/json=5Ftests_to_Lib/test/test=5Fjson_and?= =?utf-8?q?_make?= Message-ID: <3c9pqS21Gmz7LjN@mail.python.org> http://hg.python.org/cpython/rev/95cf8640b271 changeset: 85064:95cf8640b271 
branch: 3.3 parent: 85061:698fd628b001 user: Ezio Melotti date: Thu Aug 08 15:03:45 2013 +0300 summary: #18273: move the tests in Lib/test/json_tests to Lib/test/test_json and make them discoverable by unittest. Patch by Zachary Ware. files: Lib/test/test_json.py | 17 ---------- Lib/test/json_tests/__init__.py | 13 +------ Lib/test/test_json/__main__.py | 4 ++ Lib/test/json_tests/test_decode.py | 2 +- Lib/test/json_tests/test_default.py | 2 +- Lib/test/json_tests/test_dump.py | 2 +- Lib/test/json_tests/test_encode_basestring_ascii.py | 2 +- Lib/test/json_tests/test_fail.py | 2 +- Lib/test/json_tests/test_float.py | 2 +- Lib/test/json_tests/test_indent.py | 2 +- Lib/test/json_tests/test_pass1.py | 2 +- Lib/test/json_tests/test_pass2.py | 2 +- Lib/test/json_tests/test_pass3.py | 2 +- Lib/test/json_tests/test_recursion.py | 2 +- Lib/test/json_tests/test_scanstring.py | 2 +- Lib/test/json_tests/test_separators.py | 2 +- Lib/test/json_tests/test_speedups.py | 2 +- Lib/test/json_tests/test_tool.py | 0 Lib/test/json_tests/test_unicode.py | 2 +- Makefile.pre.in | 2 +- Misc/NEWS | 3 + 21 files changed, 25 insertions(+), 44 deletions(-) diff --git a/Lib/test/test_json.py b/Lib/test/test_json.py deleted file mode 100644 --- a/Lib/test/test_json.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Tests for json. - -The tests for json are defined in the json.tests package; -the test_suite() function there returns a test suite that's ready to -be run. -""" - -from test import json_tests -import test.support - - -def test_main(): - test.support.run_unittest(json_tests.test_suite()) - - -if __name__ == "__main__": - test_main() diff --git a/Lib/test/json_tests/__init__.py b/Lib/test/test_json/__init__.py rename from Lib/test/json_tests/__init__.py rename to Lib/test/test_json/__init__.py --- a/Lib/test/json_tests/__init__.py +++ b/Lib/test/test_json/__init__.py @@ -44,12 +44,12 @@ here = os.path.dirname(__file__) -def test_suite(): +def load_tests(*args): suite = additional_tests() loader = unittest.TestLoader() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): - modname = "test.json_tests." + fn[:-3] + modname = "test.test_json." 
+ fn[:-3] __import__(modname) module = sys.modules[modname] suite.addTests(loader.loadTestsFromModule(module)) @@ -62,12 +62,3 @@ suite.addTest(TestPyTest('test_pyjson')) suite.addTest(TestCTest('test_cjson')) return suite - -def main(): - suite = test_suite() - runner = unittest.TextTestRunner() - runner.run(suite) - -if __name__ == '__main__': - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) - main() diff --git a/Lib/test/test_json/__main__.py b/Lib/test/test_json/__main__.py new file mode 100644 --- /dev/null +++ b/Lib/test/test_json/__main__.py @@ -0,0 +1,4 @@ +import unittest +from test.test_json import load_tests + +unittest.main() diff --git a/Lib/test/json_tests/test_decode.py b/Lib/test/test_json/test_decode.py rename from Lib/test/json_tests/test_decode.py rename to Lib/test/test_json/test_decode.py --- a/Lib/test/json_tests/test_decode.py +++ b/Lib/test/test_json/test_decode.py @@ -1,7 +1,7 @@ import decimal from io import StringIO from collections import OrderedDict -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestDecode: diff --git a/Lib/test/json_tests/test_default.py b/Lib/test/test_json/test_default.py rename from Lib/test/json_tests/test_default.py rename to Lib/test/test_json/test_default.py --- a/Lib/test/json_tests/test_default.py +++ b/Lib/test/test_json/test_default.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestDefault: diff --git a/Lib/test/json_tests/test_dump.py b/Lib/test/test_json/test_dump.py rename from Lib/test/json_tests/test_dump.py rename to Lib/test/test_json/test_dump.py --- a/Lib/test/json_tests/test_dump.py +++ b/Lib/test/test_json/test_dump.py @@ -1,5 +1,5 @@ from io import StringIO -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest from test.support import bigmemtest, _1G diff --git a/Lib/test/json_tests/test_encode_basestring_ascii.py b/Lib/test/test_json/test_encode_basestring_ascii.py rename from Lib/test/json_tests/test_encode_basestring_ascii.py rename to Lib/test/test_json/test_encode_basestring_ascii.py --- a/Lib/test/json_tests/test_encode_basestring_ascii.py +++ b/Lib/test/test_json/test_encode_basestring_ascii.py @@ -1,5 +1,5 @@ from collections import OrderedDict -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest CASES = [ diff --git a/Lib/test/json_tests/test_fail.py b/Lib/test/test_json/test_fail.py rename from Lib/test/json_tests/test_fail.py rename to Lib/test/test_json/test_fail.py --- a/Lib/test/json_tests/test_fail.py +++ b/Lib/test/test_json/test_fail.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest # 2007-10-05 JSONDOCS = [ diff --git a/Lib/test/json_tests/test_float.py b/Lib/test/test_json/test_float.py rename from Lib/test/json_tests/test_float.py rename to Lib/test/test_json/test_float.py --- a/Lib/test/json_tests/test_float.py +++ b/Lib/test/test_json/test_float.py @@ -1,5 +1,5 @@ import math -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestFloat: diff --git a/Lib/test/json_tests/test_indent.py b/Lib/test/test_json/test_indent.py rename from Lib/test/json_tests/test_indent.py rename to Lib/test/test_json/test_indent.py --- a/Lib/test/json_tests/test_indent.py +++ b/Lib/test/test_json/test_indent.py @@ -1,6 +1,6 @@ import textwrap from io import StringIO -from test.json_tests import PyTest, CTest +from 
test.test_json import PyTest, CTest class TestIndent: diff --git a/Lib/test/json_tests/test_pass1.py b/Lib/test/test_json/test_pass1.py rename from Lib/test/json_tests/test_pass1.py rename to Lib/test/test_json/test_pass1.py --- a/Lib/test/json_tests/test_pass1.py +++ b/Lib/test/test_json/test_pass1.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest # from http://json.org/JSON_checker/test/pass1.json diff --git a/Lib/test/json_tests/test_pass2.py b/Lib/test/test_json/test_pass2.py rename from Lib/test/json_tests/test_pass2.py rename to Lib/test/test_json/test_pass2.py --- a/Lib/test/json_tests/test_pass2.py +++ b/Lib/test/test_json/test_pass2.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest # from http://json.org/JSON_checker/test/pass2.json diff --git a/Lib/test/json_tests/test_pass3.py b/Lib/test/test_json/test_pass3.py rename from Lib/test/json_tests/test_pass3.py rename to Lib/test/test_json/test_pass3.py --- a/Lib/test/json_tests/test_pass3.py +++ b/Lib/test/test_json/test_pass3.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest # from http://json.org/JSON_checker/test/pass3.json diff --git a/Lib/test/json_tests/test_recursion.py b/Lib/test/test_json/test_recursion.py rename from Lib/test/json_tests/test_recursion.py rename to Lib/test/test_json/test_recursion.py --- a/Lib/test/json_tests/test_recursion.py +++ b/Lib/test/test_json/test_recursion.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class JSONTestObject: diff --git a/Lib/test/json_tests/test_scanstring.py b/Lib/test/test_json/test_scanstring.py rename from Lib/test/json_tests/test_scanstring.py rename to Lib/test/test_json/test_scanstring.py --- a/Lib/test/json_tests/test_scanstring.py +++ b/Lib/test/test_json/test_scanstring.py @@ -1,5 +1,5 @@ import sys -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestScanstring: diff --git a/Lib/test/json_tests/test_separators.py b/Lib/test/test_json/test_separators.py rename from Lib/test/json_tests/test_separators.py rename to Lib/test/test_json/test_separators.py --- a/Lib/test/json_tests/test_separators.py +++ b/Lib/test/test_json/test_separators.py @@ -1,5 +1,5 @@ import textwrap -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestSeparators: diff --git a/Lib/test/json_tests/test_speedups.py b/Lib/test/test_json/test_speedups.py rename from Lib/test/json_tests/test_speedups.py rename to Lib/test/test_json/test_speedups.py --- a/Lib/test/json_tests/test_speedups.py +++ b/Lib/test/test_json/test_speedups.py @@ -1,4 +1,4 @@ -from test.json_tests import CTest +from test.test_json import CTest class TestSpeedups(CTest): diff --git a/Lib/test/json_tests/test_tool.py b/Lib/test/test_json/test_tool.py rename from Lib/test/json_tests/test_tool.py rename to Lib/test/test_json/test_tool.py diff --git a/Lib/test/json_tests/test_unicode.py b/Lib/test/test_json/test_unicode.py rename from Lib/test/json_tests/test_unicode.py rename to Lib/test/test_json/test_unicode.py --- a/Lib/test/json_tests/test_unicode.py +++ b/Lib/test/test_json/test_unicode.py @@ -1,5 +1,5 @@ from collections import OrderedDict -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestUnicode: diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ 
-1036,7 +1036,7 @@ test/namespace_pkgs/module_and_namespace_package/a_test \ collections concurrent concurrent/futures encodings \ email email/mime test/test_email test/test_email/data \ - html json test/json_tests http dbm xmlrpc \ + html json test/test_json http dbm xmlrpc \ sqlite3 sqlite3/test \ logging csv wsgiref urllib \ lib2to3 lib2to3/fixes lib2to3/pgen2 lib2to3/tests \ diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -237,6 +237,9 @@ Tests ----- +- Issue #18273: move the tests in Lib/test/json_tests to Lib/test/test_json + and make them discoverable by unittest. Patch by Zachary Ware. + - Fix a fcntl test case on KFreeBSD, Debian #708653 (Petr Salinger). - Issue #18396: Fix spurious test failure in test_signal on Windows when -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 14:31:49 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 14:31:49 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4MjczOiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3c9pqT5PKQz7LjV@mail.python.org> http://hg.python.org/cpython/rev/f7ed301e7199 changeset: 85065:f7ed301e7199 parent: 85063:f871f8662509 parent: 85064:95cf8640b271 user: Ezio Melotti date: Thu Aug 08 15:18:26 2013 +0300 summary: #18273: merge with 3.3. files: Lib/test/test_json.py | 17 ---------- Lib/test/json_tests/__init__.py | 13 +------ Lib/test/test_json/__main__.py | 4 ++ Lib/test/json_tests/test_decode.py | 2 +- Lib/test/json_tests/test_default.py | 2 +- Lib/test/json_tests/test_dump.py | 2 +- Lib/test/json_tests/test_encode_basestring_ascii.py | 2 +- Lib/test/json_tests/test_fail.py | 2 +- Lib/test/json_tests/test_float.py | 2 +- Lib/test/json_tests/test_indent.py | 2 +- Lib/test/json_tests/test_pass1.py | 2 +- Lib/test/json_tests/test_pass2.py | 2 +- Lib/test/json_tests/test_pass3.py | 2 +- Lib/test/json_tests/test_recursion.py | 2 +- Lib/test/json_tests/test_scanstring.py | 2 +- Lib/test/json_tests/test_separators.py | 2 +- Lib/test/json_tests/test_speedups.py | 2 +- Lib/test/json_tests/test_tool.py | 0 Lib/test/json_tests/test_unicode.py | 2 +- Makefile.pre.in | 2 +- Misc/NEWS | 3 + 21 files changed, 25 insertions(+), 44 deletions(-) diff --git a/Lib/test/test_json.py b/Lib/test/test_json.py deleted file mode 100644 --- a/Lib/test/test_json.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Tests for json. - -The tests for json are defined in the json.tests package; -the test_suite() function there returns a test suite that's ready to -be run. -""" - -from test import json_tests -import test.support - - -def test_main(): - test.support.run_unittest(json_tests.test_suite()) - - -if __name__ == "__main__": - test_main() diff --git a/Lib/test/json_tests/__init__.py b/Lib/test/test_json/__init__.py rename from Lib/test/json_tests/__init__.py rename to Lib/test/test_json/__init__.py --- a/Lib/test/json_tests/__init__.py +++ b/Lib/test/test_json/__init__.py @@ -44,12 +44,12 @@ here = os.path.dirname(__file__) -def test_suite(): +def load_tests(*args): suite = additional_tests() loader = unittest.TestLoader() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): - modname = "test.json_tests." + fn[:-3] + modname = "test.test_json." 
+ fn[:-3] __import__(modname) module = sys.modules[modname] suite.addTests(loader.loadTestsFromModule(module)) @@ -62,12 +62,3 @@ suite.addTest(TestPyTest('test_pyjson')) suite.addTest(TestCTest('test_cjson')) return suite - -def main(): - suite = test_suite() - runner = unittest.TextTestRunner() - runner.run(suite) - -if __name__ == '__main__': - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) - main() diff --git a/Lib/test/test_json/__main__.py b/Lib/test/test_json/__main__.py new file mode 100644 --- /dev/null +++ b/Lib/test/test_json/__main__.py @@ -0,0 +1,4 @@ +import unittest +from test.test_json import load_tests + +unittest.main() diff --git a/Lib/test/json_tests/test_decode.py b/Lib/test/test_json/test_decode.py rename from Lib/test/json_tests/test_decode.py rename to Lib/test/test_json/test_decode.py --- a/Lib/test/json_tests/test_decode.py +++ b/Lib/test/test_json/test_decode.py @@ -1,7 +1,7 @@ import decimal from io import StringIO from collections import OrderedDict -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestDecode: diff --git a/Lib/test/json_tests/test_default.py b/Lib/test/test_json/test_default.py rename from Lib/test/json_tests/test_default.py rename to Lib/test/test_json/test_default.py --- a/Lib/test/json_tests/test_default.py +++ b/Lib/test/test_json/test_default.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestDefault: diff --git a/Lib/test/json_tests/test_dump.py b/Lib/test/test_json/test_dump.py rename from Lib/test/json_tests/test_dump.py rename to Lib/test/test_json/test_dump.py --- a/Lib/test/json_tests/test_dump.py +++ b/Lib/test/test_json/test_dump.py @@ -1,5 +1,5 @@ from io import StringIO -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest from test.support import bigmemtest, _1G diff --git a/Lib/test/json_tests/test_encode_basestring_ascii.py b/Lib/test/test_json/test_encode_basestring_ascii.py rename from Lib/test/json_tests/test_encode_basestring_ascii.py rename to Lib/test/test_json/test_encode_basestring_ascii.py --- a/Lib/test/json_tests/test_encode_basestring_ascii.py +++ b/Lib/test/test_json/test_encode_basestring_ascii.py @@ -1,5 +1,5 @@ from collections import OrderedDict -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest CASES = [ diff --git a/Lib/test/json_tests/test_fail.py b/Lib/test/test_json/test_fail.py rename from Lib/test/json_tests/test_fail.py rename to Lib/test/test_json/test_fail.py --- a/Lib/test/json_tests/test_fail.py +++ b/Lib/test/test_json/test_fail.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest import re # 2007-10-05 diff --git a/Lib/test/json_tests/test_float.py b/Lib/test/test_json/test_float.py rename from Lib/test/json_tests/test_float.py rename to Lib/test/test_json/test_float.py --- a/Lib/test/json_tests/test_float.py +++ b/Lib/test/test_json/test_float.py @@ -1,5 +1,5 @@ import math -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestFloat: diff --git a/Lib/test/json_tests/test_indent.py b/Lib/test/test_json/test_indent.py rename from Lib/test/json_tests/test_indent.py rename to Lib/test/test_json/test_indent.py --- a/Lib/test/json_tests/test_indent.py +++ b/Lib/test/test_json/test_indent.py @@ -1,6 +1,6 @@ import textwrap from io import StringIO -from test.json_tests import PyTest, CTest +from 
test.test_json import PyTest, CTest class TestIndent: diff --git a/Lib/test/json_tests/test_pass1.py b/Lib/test/test_json/test_pass1.py rename from Lib/test/json_tests/test_pass1.py rename to Lib/test/test_json/test_pass1.py --- a/Lib/test/json_tests/test_pass1.py +++ b/Lib/test/test_json/test_pass1.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest # from http://json.org/JSON_checker/test/pass1.json diff --git a/Lib/test/json_tests/test_pass2.py b/Lib/test/test_json/test_pass2.py rename from Lib/test/json_tests/test_pass2.py rename to Lib/test/test_json/test_pass2.py --- a/Lib/test/json_tests/test_pass2.py +++ b/Lib/test/test_json/test_pass2.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest # from http://json.org/JSON_checker/test/pass2.json diff --git a/Lib/test/json_tests/test_pass3.py b/Lib/test/test_json/test_pass3.py rename from Lib/test/json_tests/test_pass3.py rename to Lib/test/test_json/test_pass3.py --- a/Lib/test/json_tests/test_pass3.py +++ b/Lib/test/test_json/test_pass3.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest # from http://json.org/JSON_checker/test/pass3.json diff --git a/Lib/test/json_tests/test_recursion.py b/Lib/test/test_json/test_recursion.py rename from Lib/test/json_tests/test_recursion.py rename to Lib/test/test_json/test_recursion.py --- a/Lib/test/json_tests/test_recursion.py +++ b/Lib/test/test_json/test_recursion.py @@ -1,4 +1,4 @@ -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class JSONTestObject: diff --git a/Lib/test/json_tests/test_scanstring.py b/Lib/test/test_json/test_scanstring.py rename from Lib/test/json_tests/test_scanstring.py rename to Lib/test/test_json/test_scanstring.py --- a/Lib/test/json_tests/test_scanstring.py +++ b/Lib/test/test_json/test_scanstring.py @@ -1,5 +1,5 @@ import sys -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestScanstring: diff --git a/Lib/test/json_tests/test_separators.py b/Lib/test/test_json/test_separators.py rename from Lib/test/json_tests/test_separators.py rename to Lib/test/test_json/test_separators.py --- a/Lib/test/json_tests/test_separators.py +++ b/Lib/test/test_json/test_separators.py @@ -1,5 +1,5 @@ import textwrap -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestSeparators: diff --git a/Lib/test/json_tests/test_speedups.py b/Lib/test/test_json/test_speedups.py rename from Lib/test/json_tests/test_speedups.py rename to Lib/test/test_json/test_speedups.py --- a/Lib/test/json_tests/test_speedups.py +++ b/Lib/test/test_json/test_speedups.py @@ -1,4 +1,4 @@ -from test.json_tests import CTest +from test.test_json import CTest class TestSpeedups(CTest): diff --git a/Lib/test/json_tests/test_tool.py b/Lib/test/test_json/test_tool.py rename from Lib/test/json_tests/test_tool.py rename to Lib/test/test_json/test_tool.py diff --git a/Lib/test/json_tests/test_unicode.py b/Lib/test/test_json/test_unicode.py rename from Lib/test/json_tests/test_unicode.py rename to Lib/test/test_json/test_unicode.py --- a/Lib/test/json_tests/test_unicode.py +++ b/Lib/test/test_json/test_unicode.py @@ -1,5 +1,5 @@ from collections import OrderedDict -from test.json_tests import PyTest, CTest +from test.test_json import PyTest, CTest class TestUnicode: diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ 
-1079,7 +1079,7 @@ test/namespace_pkgs/module_and_namespace_package/a_test \ collections concurrent concurrent/futures encodings \ email email/mime test/test_email test/test_email/data \ - html json test/json_tests http dbm xmlrpc \ + html json test/test_json http dbm xmlrpc \ sqlite3 sqlite3/test \ logging csv wsgiref urllib \ lib2to3 lib2to3/fixes lib2to3/pgen2 lib2to3/tests \ diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -634,6 +634,9 @@ Tests ----- +- Issue #18273: move the tests in Lib/test/json_tests to Lib/test/test_json + and make them discoverable by unittest. Patch by Zachary Ware. + - Fix a fcntl test case on KFreeBSD, Debian #708653 (Petr Salinger). - Issue #18396: Fix spurious test failure in test_signal on Windows when -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 14:46:27 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 14:46:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4MjY3OiBtYWtl?= =?utf-8?q?_whitespace_consistent_and_fix_an_operator=2E?= Message-ID: <3c9q8M61bPz7Ljb@mail.python.org> http://hg.python.org/cpython/rev/38d341ef28b3 changeset: 85066:38d341ef28b3 branch: 3.3 parent: 85064:95cf8640b271 user: Ezio Melotti date: Thu Aug 08 15:45:56 2013 +0300 summary: #18267: make whitespace consistent and fix an operator. files: Doc/library/xmlrpc.client.rst | 18 +++++++++--------- 1 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst --- a/Doc/library/xmlrpc.client.rst +++ b/Doc/library/xmlrpc.client.rst @@ -439,14 +439,14 @@ from xmlrpc.server import SimpleXMLRPCServer - def add(x,y): - return x+y + def add(x, y): + return x + y def subtract(x, y): - return x-y + return x - y def multiply(x, y): - return x*y + return x * y def divide(x, y): return x // y @@ -467,13 +467,13 @@ proxy = xmlrpc.client.ServerProxy("http://localhost:8000/") multicall = xmlrpc.client.MultiCall(proxy) - multicall.add(7,3) - multicall.subtract(7,3) - multicall.multiply(7,3) - multicall.divide(7,3) + multicall.add(7, 3) + multicall.subtract(7, 3) + multicall.multiply(7, 3) + multicall.divide(7, 3) result = multicall() - print("7+3=%d, 7-3=%d, 7*3=%d, 7/3=%d" % tuple(result)) + print("7+3=%d, 7-3=%d, 7*3=%d, 7//3=%d" % tuple(result)) Convenience Functions -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 14:46:29 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 14:46:29 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4MjY3OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3c9q8P12tyzQ00@mail.python.org> http://hg.python.org/cpython/rev/9875410ed390 changeset: 85067:9875410ed390 parent: 85065:f7ed301e7199 parent: 85066:38d341ef28b3 user: Ezio Melotti date: Thu Aug 08 15:46:13 2013 +0300 summary: #18267: merge with 3.3. 
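The operator fix above matters because Python 3 has two distinct division operators: the example's divide(x, y) uses floor division, so the value printed really is 7 // 3, and the old "7/3" label made the output look wrong. A short interactive illustration (not part of the changeset above):

    >>> 7 / 3       # true division always produces a float in Python 3
    2.3333333333333335
    >>> 7 // 3      # floor division, which is what divide(x, y) performs
    2
    >>> "7//3=%d" % (7 // 3)
    '7//3=2'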
files: Doc/library/xmlrpc.client.rst | 18 +++++++++--------- 1 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst --- a/Doc/library/xmlrpc.client.rst +++ b/Doc/library/xmlrpc.client.rst @@ -439,14 +439,14 @@ from xmlrpc.server import SimpleXMLRPCServer - def add(x,y): - return x+y + def add(x, y): + return x + y def subtract(x, y): - return x-y + return x - y def multiply(x, y): - return x*y + return x * y def divide(x, y): return x // y @@ -467,13 +467,13 @@ proxy = xmlrpc.client.ServerProxy("http://localhost:8000/") multicall = xmlrpc.client.MultiCall(proxy) - multicall.add(7,3) - multicall.subtract(7,3) - multicall.multiply(7,3) - multicall.divide(7,3) + multicall.add(7, 3) + multicall.subtract(7, 3) + multicall.multiply(7, 3) + multicall.divide(7, 3) result = multicall() - print("7+3=%d, 7-3=%d, 7*3=%d, 7/3=%d" % tuple(result)) + print("7+3=%d, 7-3=%d, 7*3=%d, 7//3=%d" % tuple(result)) Convenience Functions -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 15:52:00 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Thu, 8 Aug 2013 15:52:00 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Fix_a_typo_in_?= =?utf-8?q?PyUnicode=5FCopyCharacters=28=29_documentation=2E?= Message-ID: <3c9rc07510z7LjN@mail.python.org> http://hg.python.org/cpython/rev/2b3e9ffb7c7d changeset: 85068:2b3e9ffb7c7d branch: 3.3 parent: 85066:38d341ef28b3 user: Serhiy Storchaka date: Thu Aug 08 16:47:43 2013 +0300 summary: Fix a typo in PyUnicode_CopyCharacters() documentation. files: Doc/c-api/unicode.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -565,7 +565,7 @@ .. c:function:: int PyUnicode_CopyCharacters(PyObject *to, Py_ssize_t to_start, \ - PyObject *to, Py_ssize_t from_start, Py_ssize_t how_many) + PyObject *from, Py_ssize_t from_start, Py_ssize_t how_many) Copy characters from one Unicode object into another. This function performs character conversion when necessary and falls back to :c:func:`memcpy` if -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 15:52:02 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Thu, 8 Aug 2013 15:52:02 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Fix_a_typo_in_PyUnicode=5FCopyCharacters=28=29_documenta?= =?utf-8?q?tion=2E?= Message-ID: <3c9rc21tJHz7LjN@mail.python.org> http://hg.python.org/cpython/rev/7d7dc2fa61c1 changeset: 85069:7d7dc2fa61c1 parent: 85067:9875410ed390 parent: 85068:2b3e9ffb7c7d user: Serhiy Storchaka date: Thu Aug 08 16:49:45 2013 +0300 summary: Fix a typo in PyUnicode_CopyCharacters() documentation. files: Doc/c-api/unicode.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -576,7 +576,7 @@ .. c:function:: int PyUnicode_CopyCharacters(PyObject *to, Py_ssize_t to_start, \ - PyObject *to, Py_ssize_t from_start, Py_ssize_t how_many) + PyObject *from, Py_ssize_t from_start, Py_ssize_t how_many) Copy characters from one Unicode object into another. 
This function performs character conversion when necessary and falls back to :c:func:`memcpy` if -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 18:40:18 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 18:40:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NTgxOiByZW1v?= =?utf-8?q?ve_duplicate_test_and_run_a_test_class_that_was_skipped=2E__Ini?= =?utf-8?q?tial?= Message-ID: <3c9wLB3G3zzQRq@mail.python.org> http://hg.python.org/cpython/rev/e8f8f81c5af6 changeset: 85070:e8f8f81c5af6 branch: 3.3 parent: 85068:2b3e9ffb7c7d user: Ezio Melotti date: Thu Aug 08 19:36:36 2013 +0300 summary: #18581: remove duplicate test and run a test class that was skipped. Initial patch by Vajrasky Kok. files: Lib/test/test_abc.py | 32 -------------------------------- 1 files changed, 0 insertions(+), 32 deletions(-) diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py --- a/Lib/test/test_abc.py +++ b/Lib/test/test_abc.py @@ -65,34 +65,6 @@ self.assertEqual(D.foo(), 4) self.assertEqual(D().foo(), 4) - def test_abstractmethod_integration(self): - for abstractthing in [abc.abstractmethod, abc.abstractproperty, - abc.abstractclassmethod, - abc.abstractstaticmethod]: - class C(metaclass=abc.ABCMeta): - @abstractthing - def foo(self): pass # abstract - def bar(self): pass # concrete - self.assertEqual(C.__abstractmethods__, {"foo"}) - self.assertRaises(TypeError, C) # because foo is abstract - self.assertTrue(isabstract(C)) - class D(C): - def bar(self): pass # concrete override of concrete - self.assertEqual(D.__abstractmethods__, {"foo"}) - self.assertRaises(TypeError, D) # because foo is still abstract - self.assertTrue(isabstract(D)) - class E(D): - def foo(self): pass - self.assertEqual(E.__abstractmethods__, set()) - E() # now foo is concrete, too - self.assertFalse(isabstract(E)) - class F(E): - @abstractthing - def bar(self): pass # abstract override of concrete - self.assertEqual(F.__abstractmethods__, {"bar"}) - self.assertRaises(TypeError, F) # because bar is abstract now - self.assertTrue(isabstract(F)) - class TestABC(unittest.TestCase): @@ -416,9 +388,5 @@ self.assertEqual(B.counter, 1) -def test_main(): - support.run_unittest(TestABC) - - if __name__ == "__main__": unittest.main() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 18:40:19 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 18:40:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NTgxOiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3c9wLC6VJ9z7LlY@mail.python.org> http://hg.python.org/cpython/rev/328b1a29102c changeset: 85071:328b1a29102c parent: 85069:7d7dc2fa61c1 parent: 85070:e8f8f81c5af6 user: Ezio Melotti date: Thu Aug 08 19:37:52 2013 +0300 summary: #18581: merge with 3.3. 
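The assertions deleted above were a duplicate according to the commit message; the behaviour they exercised, that a class with unimplemented abstract methods cannot be instantiated, remains covered by the surviving test class. A minimal illustration of that behaviour (not part of the patch):

    import abc

    class Base(metaclass=abc.ABCMeta):
        @abc.abstractmethod
        def foo(self):
            ...

    class Concrete(Base):
        def foo(self):
            return 42

    try:
        Base()                   # foo() is still abstract here
    except TypeError as exc:
        print(exc)               # e.g. "Can't instantiate abstract class Base ..."

    print(Concrete().foo())      # prints 42; overriding foo() makes the class concrete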
files: Lib/test/test_abc.py | 32 -------------------------------- 1 files changed, 0 insertions(+), 32 deletions(-) diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py --- a/Lib/test/test_abc.py +++ b/Lib/test/test_abc.py @@ -65,34 +65,6 @@ self.assertEqual(D.foo(), 4) self.assertEqual(D().foo(), 4) - def test_abstractmethod_integration(self): - for abstractthing in [abc.abstractmethod, abc.abstractproperty, - abc.abstractclassmethod, - abc.abstractstaticmethod]: - class C(metaclass=abc.ABCMeta): - @abstractthing - def foo(self): pass # abstract - def bar(self): pass # concrete - self.assertEqual(C.__abstractmethods__, {"foo"}) - self.assertRaises(TypeError, C) # because foo is abstract - self.assertTrue(isabstract(C)) - class D(C): - def bar(self): pass # concrete override of concrete - self.assertEqual(D.__abstractmethods__, {"foo"}) - self.assertRaises(TypeError, D) # because foo is still abstract - self.assertTrue(isabstract(D)) - class E(D): - def foo(self): pass - self.assertEqual(E.__abstractmethods__, set()) - E() # now foo is concrete, too - self.assertFalse(isabstract(E)) - class F(E): - @abstractthing - def bar(self): pass # abstract override of concrete - self.assertEqual(F.__abstractmethods__, {"bar"}) - self.assertRaises(TypeError, F) # because bar is abstract now - self.assertTrue(isabstract(F)) - class TestABC(unittest.TestCase): @@ -432,9 +404,5 @@ self.assertEqual(B.counter, 1) -def test_main(): - support.run_unittest(TestABC) - - if __name__ == "__main__": unittest.main() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 19:13:14 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 19:13:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogIzE4MzU3OiBhZGQg?= =?utf-8?q?tests_for_dictview_set_difference=2E__Patch_by_Fraser_Tweedale?= =?utf-8?q?=2E?= Message-ID: <3c9x4B0N66z7Ll5@mail.python.org> http://hg.python.org/cpython/rev/0152152b09d0 changeset: 85072:0152152b09d0 branch: 2.7 parent: 85051:719ee60fc5e2 user: Ezio Melotti date: Thu Aug 08 20:09:19 2013 +0300 summary: #18357: add tests for dictview set difference. Patch by Fraser Tweedale. 
files: Lib/test/test_dictviews.py | 15 +++++++++++++++ Misc/ACKS | 1 + Misc/NEWS | 3 +++ 3 files changed, 19 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_dictviews.py b/Lib/test/test_dictviews.py --- a/Lib/test/test_dictviews.py +++ b/Lib/test/test_dictviews.py @@ -112,6 +112,13 @@ self.assertEqual(d1.viewkeys() ^ set(d3.viewkeys()), {'a', 'b', 'd', 'e'}) + self.assertEqual(d1.viewkeys() - d1.viewkeys(), set()) + self.assertEqual(d1.viewkeys() - d2.viewkeys(), {'a'}) + self.assertEqual(d1.viewkeys() - d3.viewkeys(), {'a', 'b'}) + self.assertEqual(d1.viewkeys() - set(d1.viewkeys()), set()) + self.assertEqual(d1.viewkeys() - set(d2.viewkeys()), {'a'}) + self.assertEqual(d1.viewkeys() - set(d3.viewkeys()), {'a', 'b'}) + def test_items_set_operations(self): d1 = {'a': 1, 'b': 2} d2 = {'a': 2, 'b': 2} @@ -144,6 +151,14 @@ self.assertEqual(d1.viewitems() ^ d3.viewitems(), {('a', 1), ('b', 2), ('d', 4), ('e', 5)}) + self.assertEqual(d1.viewitems() - d1.viewitems(), set()) + self.assertEqual(d1.viewitems() - d2.viewitems(), {('a', 1)}) + self.assertEqual(d1.viewitems() - d3.viewitems(), {('a', 1), ('b', 2)}) + self.assertEqual(d1.viewitems() - set(d1.viewitems()), set()) + self.assertEqual(d1.viewitems() - set(d2.viewitems()), {('a', 1)}) + self.assertEqual(d1.viewitems() - set(d3.viewitems()), + {('a', 1), ('b', 2)}) + def test_recursive_repr(self): d = {} d[42] = d.viewvalues() diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1042,6 +1042,7 @@ Stephen Turner Theodore Turocy Bill Tutt +Fraser Tweedale Doobee R. Tzeck Eren T?rkay Lionel Ulmer diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -142,6 +142,9 @@ Tests ----- +- Issue #18357: add tests for dictview set difference. + Patch by Fraser Tweedale. + - Issue #11185: Fix test_wait4 under AIX. Patch by S?bastien Sabl?. - Issue #18094: test_uuid no more reports skipped tests as passed. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 19:13:15 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 19:13:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4MzU3OiBhZGQg?= =?utf-8?q?tests_for_dictview_set_difference=2E__Patch_by_Fraser_Tweedale?= =?utf-8?q?=2E?= Message-ID: <3c9x4C3hlLz7Lm5@mail.python.org> http://hg.python.org/cpython/rev/8b557ef46d7c changeset: 85073:8b557ef46d7c branch: 3.3 parent: 85070:e8f8f81c5af6 user: Ezio Melotti date: Thu Aug 08 20:12:28 2013 +0300 summary: #18357: add tests for dictview set difference. Patch by Fraser Tweedale. 
files: Lib/test/test_dictviews.py | 14 ++++++++++++++ Misc/ACKS | 1 + Misc/NEWS | 3 +++ 3 files changed, 18 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_dictviews.py b/Lib/test/test_dictviews.py --- a/Lib/test/test_dictviews.py +++ b/Lib/test/test_dictviews.py @@ -112,6 +112,13 @@ self.assertEqual(d1.keys() ^ set(d3.keys()), {'a', 'b', 'd', 'e'}) + self.assertEqual(d1.keys() - d1.keys(), set()) + self.assertEqual(d1.keys() - d2.keys(), {'a'}) + self.assertEqual(d1.keys() - d3.keys(), {'a', 'b'}) + self.assertEqual(d1.keys() - set(d1.keys()), set()) + self.assertEqual(d1.keys() - set(d2.keys()), {'a'}) + self.assertEqual(d1.keys() - set(d3.keys()), {'a', 'b'}) + self.assertFalse(d1.keys().isdisjoint(d1.keys())) self.assertFalse(d1.keys().isdisjoint(d2.keys())) self.assertFalse(d1.keys().isdisjoint(list(d2.keys()))) @@ -162,6 +169,13 @@ self.assertEqual(d1.items() ^ d3.items(), {('a', 1), ('b', 2), ('d', 4), ('e', 5)}) + self.assertEqual(d1.items() - d1.items(), set()) + self.assertEqual(d1.items() - d2.items(), {('a', 1)}) + self.assertEqual(d1.items() - d3.items(), {('a', 1), ('b', 2)}) + self.assertEqual(d1.items() - set(d1.items()), set()) + self.assertEqual(d1.items() - set(d2.items()), {('a', 1)}) + self.assertEqual(d1.items() - set(d3.items()), {('a', 1), ('b', 2)}) + self.assertFalse(d1.items().isdisjoint(d1.items())) self.assertFalse(d1.items().isdisjoint(d2.items())) self.assertFalse(d1.items().isdisjoint(list(d2.items()))) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1252,6 +1252,7 @@ Stephen Turner Theodore Turocy Bill Tutt +Fraser Tweedale Doobee R. Tzeck Eren T?rkay Lionel Ulmer diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -237,6 +237,9 @@ Tests ----- +- Issue #18357: add tests for dictview set difference. + Patch by Fraser Tweedale. + - Issue #18273: move the tests in Lib/test/json_tests to Lib/test/test_json and make them discoverable by unittest. Patch by Zachary Ware. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 19:13:16 2013 From: python-checkins at python.org (ezio.melotti) Date: Thu, 8 Aug 2013 19:13:16 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4MzU3OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3c9x4D6vFTz7LmH@mail.python.org> http://hg.python.org/cpython/rev/4db3d3d5815c changeset: 85074:4db3d3d5815c parent: 85071:328b1a29102c parent: 85073:8b557ef46d7c user: Ezio Melotti date: Thu Aug 08 20:12:57 2013 +0300 summary: #18357: merge with 3.3. 
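The new assertions above pin down the set-difference behaviour of dictionary views. A short illustrative session with throwaway dicts (not part of the patch) shows what is being tested:

    >>> d1 = {'a': 1, 'b': 2}
    >>> d2 = {'a': 2, 'b': 2}
    >>> d1.keys() - d2.keys()      # same keys on both sides, so nothing is left
    set()
    >>> d1.items() - d2.items()    # ('a', 1) differs from ('a', 2), so it survives
    {('a', 1)}
    >>> d1.keys() - {'b'}          # views also accept plain sets as the right operand
    {'a'}

On the 2.7 branch the same operations are spelled viewkeys() and viewitems(), as in the 2.7 patch above.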
files: Lib/test/test_dictviews.py | 14 ++++++++++++++ Misc/ACKS | 1 + Misc/NEWS | 3 +++ 3 files changed, 18 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_dictviews.py b/Lib/test/test_dictviews.py --- a/Lib/test/test_dictviews.py +++ b/Lib/test/test_dictviews.py @@ -112,6 +112,13 @@ self.assertEqual(d1.keys() ^ set(d3.keys()), {'a', 'b', 'd', 'e'}) + self.assertEqual(d1.keys() - d1.keys(), set()) + self.assertEqual(d1.keys() - d2.keys(), {'a'}) + self.assertEqual(d1.keys() - d3.keys(), {'a', 'b'}) + self.assertEqual(d1.keys() - set(d1.keys()), set()) + self.assertEqual(d1.keys() - set(d2.keys()), {'a'}) + self.assertEqual(d1.keys() - set(d3.keys()), {'a', 'b'}) + self.assertFalse(d1.keys().isdisjoint(d1.keys())) self.assertFalse(d1.keys().isdisjoint(d2.keys())) self.assertFalse(d1.keys().isdisjoint(list(d2.keys()))) @@ -162,6 +169,13 @@ self.assertEqual(d1.items() ^ d3.items(), {('a', 1), ('b', 2), ('d', 4), ('e', 5)}) + self.assertEqual(d1.items() - d1.items(), set()) + self.assertEqual(d1.items() - d2.items(), {('a', 1)}) + self.assertEqual(d1.items() - d3.items(), {('a', 1), ('b', 2)}) + self.assertEqual(d1.items() - set(d1.items()), set()) + self.assertEqual(d1.items() - set(d2.items()), {('a', 1)}) + self.assertEqual(d1.items() - set(d3.items()), {('a', 1), ('b', 2)}) + self.assertFalse(d1.items().isdisjoint(d1.items())) self.assertFalse(d1.items().isdisjoint(d2.items())) self.assertFalse(d1.items().isdisjoint(list(d2.items()))) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1290,6 +1290,7 @@ Stephen Turner Theodore Turocy Bill Tutt +Fraser Tweedale Doobee R. Tzeck Eren T?rkay Lionel Ulmer diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -350,6 +350,9 @@ when \r\n appears at end of 65535 bytes without other newlines. - Issue #18076: Introduce importlib.util.decode_source(). +- Issue #18357: add tests for dictview set difference. + Patch by Fraser Tweedale. + - importlib.abc.SourceLoader.get_source() no longer changes SyntaxError or UnicodeDecodeError into ImportError. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 8 19:29:06 2013 From: python-checkins at python.org (vinay.sajip) Date: Thu, 8 Aug 2013 19:29:06 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Closes_=2318671=3A_Output_?= =?utf-8?q?more_information_when_logging_exceptions_occur=2E?= Message-ID: <3c9xQV20Fcz7Lmv@mail.python.org> http://hg.python.org/cpython/rev/a206f952668e changeset: 85075:a206f952668e user: Vinay Sajip date: Thu Aug 08 18:28:53 2013 +0100 summary: Closes #18671: Output more information when logging exceptions occur. files: Lib/logging/__init__.py | 3 +++ Misc/NEWS | 2 ++ 2 files changed, 5 insertions(+), 0 deletions(-) diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -899,6 +899,9 @@ # couldn't find the right stack frame, for some reason sys.stderr.write('Logged from file %s, line %s\n' % ( record.filename, record.lineno)) + # Issue 18671: output logging message and arguments + sys.stderr.write('Message: %r\n' + 'Arguments: %s\n' % (record.msg, record.args)) except OSError: #pragma: no cover pass # see issue 5971 finally: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -22,6 +22,8 @@ Library ------- +- Issue #18671: Output more information when logging exceptions occur. + - Issue #18621: Prevent the site module's patched builtins from keeping too many references alive for too long. 
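The two lines added below extend Handler.handleError(), which the logging package calls when formatting or emitting a record raises an exception. A hedged sketch of how that path is typically hit (assuming logging.raiseExceptions is left at its default of True):

    import logging

    logging.basicConfig()
    log = logging.getLogger("demo")

    # The bad format argument is only evaluated when the handler formats the
    # record inside emit(), so the TypeError is routed to handleError().
    log.error("%d pending tasks", "three")

handleError() already wrote a traceback plus a "Logged from file ..." hint; with this change it also writes the failing record's msg and args to stderr, something like Message: '%d pending tasks' and Arguments: ('three',), which makes the offending call much easier to track down.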
-- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Fri Aug 9 05:47:35 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 09 Aug 2013 05:47:35 +0200 Subject: [Python-checkins] Daily reference leaks (a206f952668e): sum=6 Message-ID: results for a206f952668e on branch "default" -------------------------------------------- test_robotparser leaked [0, 0, 6] memory blocks, sum=6 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog6ekeEk', '-x'] From python-checkins at python.org Fri Aug 9 16:46:59 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 9 Aug 2013 16:46:59 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Add_PEP_450=3A_Adding_A_Stati?= =?utf-8?q?stics_Module_To_The_Standard_Library=2C_by_Steven?= Message-ID: <3cBTmz5GmPz7Lwv@mail.python.org> http://hg.python.org/peps/rev/d8e0108ba02c changeset: 5041:d8e0108ba02c user: Brett Cannon date: Fri Aug 09 10:46:53 2013 -0400 summary: Add PEP 450: Adding A Statistics Module To The Standard Library, by Steven D'Aprano files: pep-0450.txt | 420 +++++++++++++++++++++++++++++++++++++++ 1 files changed, 420 insertions(+), 0 deletions(-) diff --git a/pep-0450.txt b/pep-0450.txt new file mode 100644 --- /dev/null +++ b/pep-0450.txt @@ -0,0 +1,420 @@ +PEP: 450 +Title: Adding A Statistics Module To The Standard Library +Version: $Revision$ +Last-Modified: $Date$ +Author: Steven D'Aprano +Status: Draft +Type: Standards Track +Content-Type: text/plain +Created: 01-Aug-2013 +Python-Version: 3.4 +Post-History: + + +Abstract + + This PEP proposes the addition of a module for common statistics functions + such as mean, median, variance and standard deviation to the Python + standard library. + + +Rationale + + The proposed statistics module is motivated by the "batteries included" + philosophy towards the Python standard library. Raymond Hettinger and + other senior developers have requested a quality statistics library that + falls somewhere in between high-end statistics libraries and ad hoc + code.[1] Statistical functions such as mean, standard deviation and others + are obvious and useful batteries, familiar to any Secondary School student. + Even cheap scientific calculators typically include multiple statistical + functions such as: + + - mean + - population and sample variance + - population and sample standard deviation + - linear regression + - correlation coefficient + + Graphing calculators aimed at Secondary School students typically + include all of the above, plus some or all of: + + - median + - mode + - functions for calculating the probability of random variables + from the normal, t, chi-squared, and F distributions + - inference on the mean + + and others[2]. Likewise spreadsheet applications such as Microsoft Excel, + LibreOffice and Gnumeric include rich collections of statistical + functions[3]. + + In contrast, Python currently has no standard way to calculate even the + simplest and most obvious statistical functions such as mean. For those + who need statistical functions in Python, there are two obvious solutions: + + - install numpy and/or scipy[4]; + + - or use a Do It Yourself solution. + + Numpy is perhaps the most full-featured solution, but it has a few + disadvantages: + + - It may be overkill for many purposes. The documentation for numpy even + warns + + "It can be hard to know what functions are available in + numpy. 
This is not a complete list, but it does cover + most of them."[5] + + and then goes on to list over 270 functions, only a small number of + which are related to statistics. + + - Numpy is aimed at those doing heavy numerical work, and may be + intimidating to those who don't have a background in computational + mathematics and computer science. For example, numpy.mean takes four + arguments: + + mean(a, axis=None, dtype=None, out=None) + + although fortunately for the beginner or casual numpy user, three are + optional and numpy.mean does the right thing in simple cases: + + >>> numpy.mean([1, 2, 3, 4]) + 2.5 + + - For many people, installing numpy may be difficult or impossible. For + example, people in corporate environments may have to go through a + difficult, time-consuming process before being permitted to install + third-party software. For the casual Python user, having to learn about + installing third-party packages in order to average a list of numbers is + unfortunate. + + This leads to option number 2, DIY statistics functions. At first glance, + this appears to be an attractive option, due to the apparent simplicity of + common statistical functions. For example: + + def mean(data): + return sum(data)/len(data) + + def variance(data): + # Use the Computational Formula for Variance. + n = len(data) + ss = sum(x**2 for x in data) - (sum(data)**2)/n + return ss/(n-1) + + def standard_deviation(data): + return math.sqrt(variance(data)) + + The above appears to be correct with a casual test: + + >>> data = [1, 2, 4, 5, 8] + >>> variance(data) + 7.5 + + But adding a constant to every data point should not change the variance: + + >>> data = [x+1e12 for x in data] + >>> variance(data) + 0.0 + + And variance should *never* be negative: + + >>> variance(data*100) + -1239429440.1282566 + + By contrast, the proposed reference implementation gets the exactly correct + answer 7.5 for the first two examples, and a reasonably close answer for + the third: 6.012. numpy does no better[6]. + + Even simple statistical calculations contain traps for the unwary, starting + with the Computational Formula itself. Despite the name, it is numerically + unstable and can be extremely inaccurate, as can be seen above. It is + completely unsuitable for computation by computer[7]. This problem plagues + users of many programming language, not just Python[8], as coders reinvent + the same numerically inaccurate code over and over again[9], or advise + others to do so[10]. + + It isn't just the variance and standard deviation. Even the mean is not + quite as straight-forward as it might appear. The above implementation + seems too simple to have problems, but it does: + + - The built-in sum can lose accuracy when dealing with floats of wildly + differing magnitude. Consequently, the above naive mean fails this + "torture test": + + assert mean([1e30, 1, 3, -1e30]) == 1 + + returning 0 instead of 1, a purely computational error of 100%. + + - Using math.fsum inside mean will make it more accurate with float data, + but it also has the side-effect of converting any arguments to float + even when unnecessary. E.g. we should expect the mean of a list of + Fractions to be a Fraction, not a float. + + While the above mean implementation does not fail quite as catastrophically + as the naive variance does, a standard library function can do much better + than the DIY versions. + + The example above involves an especially bad set of data, but even for + more realistic data sets accuracy is important. 
The first step in + interpreting variation in data (including dealing with ill-conditioned + data) is often to standardize it to a series with variance 1 (and often + mean 0). This standardization requires accurate computation of the mean + and variance of the raw series. Naive computation of mean and variance + can lose precision very quickly. Because precision bounds accuracy, it is + important to use the most precise algorithms for computing mean and + variance that are practical, or the results of standardization are + themselves useless. + + +Comparison To Other Languages/Packages + + The proposed statistics library is not intended to be a competitor to such + third-party libraries as numpy/scipy, or of proprietary full-featured + statistics packages aimed at professional statisticians such as Minitab, + SAS and Matlab. It is aimed at the level of graphing and scientific + calculators. + + Most programming languages have little or no built-in support for + statistics functions. Some exceptions: + + R + R (and its proprietary cousin, S) is a programming language designed + for statistics work. It is extremely popular with statisticians and + is extremely feature-rich[11]. + + C# + + The C# LINQ package includes extension methods to calculate the + average of enumerables[12]. + + Ruby + + Ruby does not ship with a standard statistics module, despite some + apparent demand[13]. Statsample appears to be a feature-rich third- + party library, aiming to compete with R[14]. + + PHP + + PHP has an extremely feature-rich (although mostly undocumented) set + of advanced statistical functions[15]. + + Delphi + + Delphi includes standard statistical functions including Mean, Sum, + Variance, TotalVariance, MomentSkewKurtosis in its Math library[16]. + + GNU Scientific Library + + The GNU Scientific Library includes standard statistical functions, + percentiles, median and others[17]. One innovation I have borrowed + from the GSL is to allow the caller to optionally specify the pre- + calculated mean of the sample (or an a priori known population mean) + when calculating the variance and standard deviation[18]. + + +Design Decisions Of The Module + + My intention is to start small and grow the library as needed, rather than + try to include everything from the start. Consequently, the current + reference implementation includes only a small number of functions: mean, + variance, standard deviation, median, mode. (See the reference + implementation for a full list.) + + I have aimed for the following design features: + + - Correctness over speed. It is easier to speed up a correct but slow + function than to correct a fast but buggy one. + + - Concentrate on data in sequences, allowing two-passes over the data, + rather than potentially compromise on accuracy for the sake of a one-pass + algorithm. Functions expect data will be passed as a list or other + sequence; if given an iterator, they may internally convert to a list. + + - Functions should, as much as possible, honour any type of numeric data. + E.g. the mean of a list of Decimals should be a Decimal, not a float. + When this is not possible, treat float as the "lowest common data type". + + - Although functions support data sets of floats, Decimals or Fractions, + there is no guarantee that *mixed* data sets will be supported. (But on + the other hand, they aren't explicitly rejected either.) 
+ + - Plenty of documentation, aimed at readers who understand the basic + concepts but may not know (for example) which variance they should use + (population or sample?). Mathematicians and statisticians have a terrible + habit of being inconsistent with both notation and terminology[19], and + having spent many hours making sense of the contradictory/confusing + definitions in use, it is only fair that I do my best to clarify rather + than obfuscate the topic. + + - But avoid going into tedious[20] mathematical detail. + + +Specification + + As the proposed reference implementation is in pure Python, + other Python implementations can easily make use of the module + unchanged, or adapt it as they see fit. + + +What Should Be The Name Of The Module? + + This will be a top-level module "statistics". + + There was some interest in turning math into a package, and making this a + sub-module of math, but the general consensus eventually agreed on a + top-level module. Other potential but rejected names included "stats" (too + much risk of confusion with existing "stat" module), and "statslib" + (described as "too C-like"). + + +Previous Discussions + + This proposal has been previously discussed here[21]. + + +Frequently Asked Questions + + Q: Shouldn't this module spend time on PyPI before being considered for + the standard library? + + A: Older versions of this module have been available on PyPI[22] since + 2010. Being much simpler than numpy, it does not require many years of + external development. + + Q: Does the standard library really need yet another version of ``sum``? + + A: This proved to be the most controversial part of the reference + implementation. In one sense, clearly three sums is two too many. But + in another sense, yes. The reasons why the two existing versions are + unsuitable are described here[23] but the short summary is: + + - the built-in sum can lose precision with floats; + + - the built-in sum accepts any non-numeric data type that supports + the + operator, apart from strings and bytes; + + - math.fsum is high-precision, but coerces all arguments to float. + + There is some interest in "fixing" one or the other of the existing + sums. If this occurs before 3.4 feature-freeze, the decision to keep + statistics.sum can be re-considered. + + Q: Will this module be backported to older versions of Python? + + A: The module currently targets 3.3, and I will make it available on PyPI + for 3.3 for the foreseeable future. Backporting to older versions of + the 3.x series is likely (but not yet decided). Backporting to 2.7 is + less likely but not ruled out. + + Q: Is this supposed to replace numpy? + + A: No. While it is likely to grow over the years (see open issues below) + it is not aimed to replace, or even compete directly with, numpy. Numpy + is a full-featured numeric library aimed at professionals, the nuclear + reactor of numeric libraries in the Python ecosystem. This is just a + battery, as in "batteries included", and is aimed at an intermediate + level somewhere between "use numpy" and "roll your own version". + + +Open and Deferred Issues + + - At this stage, I am unsure of the best API for multivariate statistical + functions such as linear regression, correlation coefficient, and + covariance. 
Possible APIs include: + + * Separate arguments for x and y data: + function([x0, x1, ...], [y0, y1, ...]) + + * A single argument for (x, y) data: + function([(x0, y0), (x1, y1), ...]) + + * Selecting arbitrary columns from a 2D array: + function([[a0, x0, y0, z0], [a1, x1, y1, z1], ...], x=1, y=2) + + * Some combination of the above. + + In the absence of a consensus of preferred API for multivariate stats, + I will defer including such multivariate functions until Python 3.5. + + - Likewise, functions for calculating probability of random variables and + inference testing (e.g. Student's t-test) will be deferred until 3.5. + + - There is considerable interest in including one-pass functions that can + calculate multiple statistics from data in iterator form, without having + to convert to a list. The experimental "stats" package on PyPI includes + co-routine versions of statistics functions. Including these will be + deferred to 3.5. + + +References + + [1] http://mail.python.org/pipermail/python-dev/2010-October/104721.html + + [2] http://support.casio.com/pdf/004/CP330PLUSver310_Soft_E.pdf + + [3] Gnumeric: + https://projects.gnome.org/gnumeric/functions.shtml + + LibreOffice: + https://help.libreoffice.org/Calc/Statistical_Functions_Part_One + https://help.libreoffice.org/Calc/Statistical_Functions_Part_Two + https://help.libreoffice.org/Calc/Statistical_Functions_Part_Three + https://help.libreoffice.org/Calc/Statistical_Functions_Part_Four + https://help.libreoffice.org/Calc/Statistical_Functions_Part_Five + + [4] Scipy: http://scipy-central.org/ + Numpy: http://www.numpy.org/ + + [5] http://wiki.scipy.org/Numpy_Functions_by_Category + + [6] Tested with numpy 1.6.1 and Python 2.7. + + [7] http://www.johndcook.com/blog/2008/09/26/comparing-three-methods-of-computing-standard-deviation/ + + [8] http://rosettacode.org/wiki/Standard_deviation + + [9] https://bitbucket.org/larsyencken/simplestats/src/c42e048a6625/src/basic.py + + [10] http://stackoverflow.com/questions/2341340/calculate-mean-and-variance-with-one-iteration + + [11] http://www.r-project.org/ + + [12] http://msdn.microsoft.com/en-us/library/system.linq.enumerable.average.aspx + + [13] https://www.bcg.wisc.edu/webteam/support/ruby/standard_deviation + + [14] http://ruby-statsample.rubyforge.org/ + + [15] http://www.php.net/manual/en/ref.stats.php + + [16] http://www.ayton.id.au/gary/it/Delphi/D_maths.htm#Delphi%20Statistical%20functions. + + [17] http://www.gnu.org/software/gsl/manual/html_node/Statistics.html + + [18] http://www.gnu.org/software/gsl/manual/html_node/Mean-and-standard-deviation-and-variance.html + + [19] http://mathworld.wolfram.com/Skewness.html + + [20] At least, tedious to those who don't like this sort of thing. + + [21] http://mail.python.org/pipermail/python-ideas/2011-September/011524.html + + [22] https://pypi.python.org/pypi/stats/ + + [23] http://mail.python.org/pipermail/python-ideas/2013-August/022630.html + + +Copyright + + This document has been placed in the public domain. 
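To connect the torture tests in the Rationale above with working code, here is a rough editorial sketch (not the PEP's reference implementation, and deliberately simplified) of how an accurate sum plus a two-pass algorithm avoids the failures shown earlier:

    import math

    def mean(data):
        data = list(data)
        # math.fsum tracks partial sums exactly, so mean([1e30, 1, 3, -1e30])
        # is 1.0 rather than 0.0.  (Unlike the proposed statistics.mean, this
        # sketch always returns a float.)
        return math.fsum(data) / len(data)

    def variance(data):
        data = list(data)
        m = mean(data)
        # Two passes: centre the data on its mean first, then sum the squared
        # deviations.  Adding a large constant to every point no longer
        # changes the result, and the result can never be negative.
        ss = math.fsum((x - m) ** 2 for x in data)
        return ss / (len(data) - 1)

    data = [1, 2, 4, 5, 8]
    assert mean([1e30, 1, 3, -1e30]) == 1
    assert variance(data) == 7.5
    assert variance([x + 1e12 for x in data]) == 7.5

The reference implementation goes further than this sketch (for example, it keeps Fractions and Decimals exact instead of coercing to float), but the sketch shows why the naive Computational Formula quoted in the Rationale is not the only practical option.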
+ + + +Local Variables: +mode: indented-text +indent-tabs-mode: nil +sentence-end-double-space: t +fill-column: 70 +coding: utf-8 +End: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Aug 9 17:50:12 2013 From: python-checkins at python.org (eli.bendersky) Date: Fri, 9 Aug 2013 17:50:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?devguide=3A_Add_Stefan_Behnel_to_the_?= =?utf-8?q?experts_list_for_xml=2Eetree?= Message-ID: <3cBW9w2Z08z7Lrr@mail.python.org> http://hg.python.org/devguide/rev/747970502b23 changeset: 635:747970502b23 user: Eli Bendersky date: Fri Aug 09 08:50:05 2013 -0700 summary: Add Stefan Behnel to the experts list for xml.etree files: experts.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/experts.rst b/experts.rst --- a/experts.rst +++ b/experts.rst @@ -249,7 +249,7 @@ xml.dom xml.dom.minidom xml.dom.pulldom -xml.etree effbot (inactive), eli.bendersky* +xml.etree effbot (inactive), eli.bendersky*, scoder xml.parsers.expat christian.heimes xml.sax christian.heimes xml.sax.handler -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Fri Aug 9 22:20:28 2013 From: python-checkins at python.org (r.david.murray) Date: Fri, 9 Aug 2013 22:20:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_=2318600=3A_add_policy_to_?= =?utf-8?q?add=5Fstring=2C_and_as=5Fbytes_and_=5F=5Fbytes=5F=5F_methods=2E?= Message-ID: <3cBd9m2KrCz7M2F@mail.python.org> http://hg.python.org/cpython/rev/53287858e71f changeset: 85076:53287858e71f user: R David Murray date: Fri Aug 09 16:15:28 2013 -0400 summary: #18600: add policy to add_string, and as_bytes and __bytes__ methods. This was triggered by wanting to make the doctest in email.policy.rst pass; as_bytes and __bytes__ are clearly useful now that we have BytesGenerator. Also updated the Message docs to document the policy keyword that was added in 3.3. files: Doc/library/email.message.rst | 78 ++++++++++++++++-- Doc/library/email.policy.rst | 3 +- Doc/whatsnew/3.4.rst | 20 ++++ Lib/email/message.py | 44 ++++++++- Lib/test/test_email/test_email.py | 33 +++++++- Misc/NEWS | 3 + 6 files changed, 160 insertions(+), 21 deletions(-) diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -31,19 +31,32 @@ Here are the methods of the :class:`Message` class: -.. class:: Message() +.. class:: Message(policy=compat32) - The constructor takes no arguments. + The *policy* argument determiens the :mod:`~email.policy` that will be used + to update the message model. The default value, :class:`compat32 + ` maintains backward compatibility with the + Python 3.2 version of the email package. For more information see the + :mod:`~email.policy` documentation. + .. versionchanged:: 3.3 The *policy* keyword argument was added. - .. method:: as_string(unixfrom=False, maxheaderlen=0) + + .. method:: as_string(unixfrom=False, maxheaderlen=0, policy=None) Return the entire message flattened as a string. When optional *unixfrom* - is ``True``, the envelope header is included in the returned string. - *unixfrom* defaults to ``False``. Flattening the message may trigger - changes to the :class:`Message` if defaults need to be filled in to - complete the transformation to a string (for example, MIME boundaries may - be generated or modified). + is true, the envelope header is included in the returned string. + *unixfrom* defaults to ``False``. 
For backward compabitility reasons, + *maxheaderlen* defaults to ``0``, so if you want a different value you + must override it explicitly (the value specified for *max_line_length* in + the policy will be ignored by this method). The *policy* argument may be + used to override the default policy obtained from the message instance. + This can be used to control some of the formatting produced by the + method, since the specified *policy* will be passed to the ``Generator``. + + Flattening the message may trigger changes to the :class:`Message` if + defaults need to be filled in to complete the transformation to a string + (for example, MIME boundaries may be generated or modified). Note that this method is provided as a convenience and may not always format the message the way you want. For example, by default it does @@ -59,10 +72,57 @@ g.flatten(msg) text = fp.getvalue() + If the message object contains binary data that is not encoded according + to RFC standards, the non-compliant data will be replaced by unicode + "unknown character" code points. (See also :meth:`.as_bytes` and + :class:`~email.generator.BytesGenerator`.) + + .. versionchanged:: 3.4 the *policy* keyword argument was added. + .. method:: __str__() - Equivalent to ``as_string(unixfrom=True)``. + Equivalent to :meth:`.as_string()`. Allows ``str(msg)`` to produce a + string containing the formatted message. + + + .. method:: as_bytes(unixfrom=False, policy=None) + + Return the entire message flattened as a bytes object. When optional + *unixfrom* is true, the envelope header is included in the returned + string. *unixfrom* defaults to ``False``. The *policy* argument may be + used to override the default policy obtained from the message instance. + This can be used to control some of the formatting produced by the + method, since the specified *policy* will be passed to the + ``BytesGenerator``. + + Flattening the message may trigger changes to the :class:`Message` if + defaults need to be filled in to complete the transformation to a string + (for example, MIME boundaries may be generated or modified). + + Note that this method is provided as a convenience and may not always + format the message the way you want. For example, by default it does + not do the mangling of lines that begin with ``From`` that is + required by the unix mbox format. For more flexibility, instantiate a + :class:`~email.generator.BytesGenerator` instance and use its + :meth:`flatten` method directly. For example:: + + from io import BytesIO + from email.generator import BytesGenerator + fp = BytesIO() + g = BytesGenerator(fp, mangle_from_=True, maxheaderlen=60) + g.flatten(msg) + text = fp.getvalue() + + .. versionadded:: 3.4 + + + .. method:: __bytes__() + + Equivalent to :meth:`.as_bytes()`. Allows ``bytes(msg)`` to produce a + bytes object containing the formatted message. + + .. versionadded:: 3.4 .. method:: is_multipart() diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -105,7 +105,8 @@ >>> import os >>> with open('converted.txt', 'wb') as f: - ... f.write(msg.as_string(policy=msg.policy.clone(linesep=os.linesep))) + ... 
f.write(msg.as_bytes(policy=msg.policy.clone(linesep=os.linesep))) + 17 Policy objects can also be combined using the addition operator, producing a policy object whose settings are a combination of the non-default values of the diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -195,6 +195,26 @@ plain tuple. (Contributed by Claudiu Popa in :issue:`17818`.) +email +----- + +:meth:`~email.message.Message.as_string` now accepts a *policy* argument to +override the default policy of the message when generating a string +representation of it. This means that ``as_string`` can now be used in more +circumstances, instead of having to create and use a :mod:`~email.generator` in +order to pass formatting parameters to its ``flatten`` method. + +New method :meth:`~email.message.Message.as_bytes` added to produce a bytes +representation of the message in a fashion similar to how ``as_string`` +produces a string representation. It does not accept the *maxheaderlen* +argument, but does accept the *unixfrom* and *policy* arguments. The +:class:`~email.message.Message` :meth:`~email.message.Message.__bytes__` method +calls it, meaning that ``bytes(mymsg)`` will now produce the intuitive +result: a bytes object containing the fully formatted message. + +(Contributed by R. David Murray in :issue:`18600`.) + + functools --------- diff --git a/Lib/email/message.py b/Lib/email/message.py --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -132,22 +132,50 @@ def __str__(self): """Return the entire formatted message as a string. - This includes the headers, body, and envelope header. """ return self.as_string() - def as_string(self, unixfrom=False, maxheaderlen=0): + def as_string(self, unixfrom=False, maxheaderlen=0, policy=None): """Return the entire formatted message as a string. - Optional `unixfrom' when True, means include the Unix From_ envelope - header. - This is a convenience method and may not generate the message exactly - as you intend. For more flexibility, use the flatten() method of a - Generator instance. + Optional 'unixfrom', when true, means include the Unix From_ envelope + header. For backward compatibility reasons, if maxheaderlen is + not specified it defaults to 0, so you must override it explicitly + if you want a different maxheaderlen. 'policy' is passed to the + Generator instance used to serialize the mesasge; if it is not + specified the policy associated with the message instance is used. + + If the message object contains binary data that is not encoded + according to RFC standards, the non-compliant data will be replaced by + unicode "unknown character" code points. """ from email.generator import Generator + policy = self.policy if policy is None else policy fp = StringIO() - g = Generator(fp, mangle_from_=False, maxheaderlen=maxheaderlen) + g = Generator(fp, + mangle_from_=False, + maxheaderlen=maxheaderlen, + policy=policy) + g.flatten(self, unixfrom=unixfrom) + return fp.getvalue() + + def __bytes__(self): + """Return the entire formatted message as a bytes object. + """ + return self.as_bytes() + + def as_bytes(self, unixfrom=False, policy=None): + """Return the entire formatted message as a bytes object. + + Optional 'unixfrom', when true, means include the Unix From_ envelope + header. 'policy' is passed to the BytesGenerator instance used to + serialize the message; if not specified the policy associated with + the message instance is used. 
+ """ + from email.generator import BytesGenerator + policy = self.policy if policy is None else policy + fp = BytesIO() + g = BytesGenerator(fp, mangle_from_=False, policy=policy) g.flatten(self, unixfrom=unixfrom) return fp.getvalue() diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -249,15 +249,42 @@ self.assertTrue('TO' in msg) def test_as_string(self): - eq = self.ndiffAssertEqual msg = self._msgobj('msg_01.txt') with openfile('msg_01.txt') as fp: text = fp.read() - eq(text, str(msg)) + self.assertEqual(text, str(msg)) fullrepr = msg.as_string(unixfrom=True) lines = fullrepr.split('\n') self.assertTrue(lines[0].startswith('From ')) - eq(text, NL.join(lines[1:])) + self.assertEqual(text, NL.join(lines[1:])) + + def test_as_string_policy(self): + msg = self._msgobj('msg_01.txt') + newpolicy = msg.policy.clone(linesep='\r\n') + fullrepr = msg.as_string(policy=newpolicy) + s = StringIO() + g = Generator(s, policy=newpolicy) + g.flatten(msg) + self.assertEqual(fullrepr, s.getvalue()) + + def test_as_bytes(self): + msg = self._msgobj('msg_01.txt') + with openfile('msg_01.txt', 'rb') as fp: + data = fp.read() + self.assertEqual(data, bytes(msg)) + fullrepr = msg.as_bytes(unixfrom=True) + lines = fullrepr.split(b'\n') + self.assertTrue(lines[0].startswith(b'From ')) + self.assertEqual(data, b'\n'.join(lines[1:])) + + def test_as_bytes_policy(self): + msg = self._msgobj('msg_01.txt') + newpolicy = msg.policy.clone(linesep='\r\n') + fullrepr = msg.as_bytes(policy=newpolicy) + s = BytesIO() + g = BytesGenerator(s,policy=newpolicy) + g.flatten(msg) + self.assertEqual(fullrepr, s.getvalue()) # test_headerregistry.TestContentTypeHeader.bad_params def test_bad_param(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -22,6 +22,9 @@ Library ------- +- Issue #18600: Added policy argument to email.message.Message.as_string, + and as_bytes and __bytes__ methods to Message. + - Issue #18671: Output more information when logging exceptions occur. - Issue #18621: Prevent the site module's patched builtins from keeping -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 9 22:20:29 2013 From: python-checkins at python.org (r.david.murray) Date: Fri, 9 Aug 2013 22:20:29 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NjAwOiBJbiAz?= =?utf-8?q?=2E3=2C_as=5Fstring_does_not_accept_a_policy_keyword=2E?= Message-ID: <3cBd9n4NsYz7M2g@mail.python.org> http://hg.python.org/cpython/rev/8fbaf4e649af changeset: 85077:8fbaf4e649af branch: 3.3 parent: 85073:8b557ef46d7c user: R David Murray date: Fri Aug 09 16:17:00 2013 -0400 summary: #18600: In 3.3, as_string does not accept a policy keyword. Also, document the policy keyword that was added to Message in 3.3. files: Doc/library/email.message.rst | 10 ++++++++-- Doc/library/email.policy.rst | 10 ---------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -31,9 +31,15 @@ Here are the methods of the :class:`Message` class: -.. class:: Message() +.. class:: Message(policy=compat32) - The constructor takes no arguments. + The *policy* argument determines the :mod:`~email.policy` that will be used + to update the message model.
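Not part of the quoted changesets, but as a quick illustration: a minimal usage sketch of the new as_bytes()/policy API, assuming a Python 3.4 interpreter with the changes above applied::

    import email

    msg = email.message_from_string("Subject: hello\n\nbody\n")

    # Serialize with an overridden policy, here CRLF line endings.
    wire = msg.as_bytes(policy=msg.policy.clone(linesep="\r\n"))
    print(wire.splitlines()[0])        # b'Subject: hello'

    # bytes(msg) is equivalent to msg.as_bytes()
    assert bytes(msg) == msg.as_bytes()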
The default value, :class:`compat32 + ` maintains backward compatibility with the + Python 3.2 version of the email package. For more information see the + :mod:`~email.policy` documentation. + + .. versionchanged:: 3.3 The *policy* keyword argument was added. .. method:: as_string(unixfrom=False, maxheaderlen=0) diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -97,16 +97,6 @@ ``sendmail's`` ``stdin``, where the default policy would use ``\n`` line separators. -Some email package methods accept a *policy* keyword argument, allowing the -policy to be overridden for that method. For example, the following code uses -the :meth:`~email.message.Message.as_string` method of the *msg* object from -the previous example and writes the message to a file using the native line -separators for the platform on which it is running:: - - >>> import os - >>> with open('converted.txt', 'wb') as f: - ... f.write(msg.as_string(policy=msg.policy.clone(linesep=os.linesep))) - Policy objects can also be combined using the addition operator, producing a policy object whose settings are a combination of the non-default values of the summed objects:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 9 22:20:30 2013 From: python-checkins at python.org (r.david.murray) Date: Fri, 9 Aug 2013 22:20:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Null_merge_related_to_=2318600=2E?= Message-ID: <3cBd9p6Nnnz7M39@mail.python.org> http://hg.python.org/cpython/rev/b9a5b7e3b32f changeset: 85078:b9a5b7e3b32f parent: 85076:53287858e71f parent: 85077:8fbaf4e649af user: R David Murray date: Fri Aug 09 16:20:06 2013 -0400 summary: Null merge related to #18600. For 3.3, we just deleted the example. In 3.4 it was fixed. files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 01:05:12 2013 From: python-checkins at python.org (victor.stinner) Date: Sat, 10 Aug 2013 01:05:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_add_section_=22Iss?= =?utf-8?q?ues_fixed_in_the_subprocess_module=22?= Message-ID: <3cBhqr576fz7M2g@mail.python.org> http://hg.python.org/peps/rev/18cd79b36bc1 changeset: 5042:18cd79b36bc1 user: Victor Stinner date: Sat Aug 10 01:02:09 2013 +0200 summary: PEP 446: add section "Issues fixed in the subprocess module" files: pep-0446.txt | 27 +++++++++++++++++++++++++++ 1 files changed, 27 insertions(+), 0 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -154,6 +154,33 @@ data. 
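As a brief illustration of what this PEP is after (not part of the quoted diff): a minimal POSIX sketch using the os.get_inheritable()/os.set_inheritable() functions that PEP 446 proposes, assuming an interpreter where they exist (Python 3.4 and later)::

    import os

    r, w = os.pipe()
    print(os.get_inheritable(r))    # False: new descriptors are non-inheritable by default
    os.set_inheritable(r, True)     # opt in explicitly before handing the fd to a child
    print(os.get_inheritable(r))    # True
    os.close(r)
    os.close(w)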
+Issues fixed in the subprocess module +------------------------------------- + +Inherited file descriptors caused 4 issues in the ``subprocess`` +module: + +* `Issue #2320: Race condition in subprocess using stdin + `_ (created in 2008) +* `Issue #3006: subprocess.Popen causes socket to remain open after + close `_ (created in 2008) +* `Issue #7213: subprocess leaks open file descriptors between Popen + instances causing hangs `_ + (created in 2009) +* `Issue #12786: subprocess wait() hangs when stdin is closed + `_ (created in 2011) + +These issues were fixed in Python 3.2 by 4 different changes in the +``subprocess`` module: + +* Pipes are now non-inheritable ; +* The default value of the *close_fds* parameter is now ``True``, + with one exception on Windows: the default value is ``False`` if + at least one standard stream is replaced ; +* A new *pass_fds* parameter has been added ; +* Creation of a ``_posixsubprocess`` module implemented in C. + + Atomic Creation of non-inheritable File Descriptors --------------------------------------------------- -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 10 01:05:13 2013 From: python-checkins at python.org (victor.stinner) Date: Sat, 10 Aug 2013 01:05:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_Issues=3A_add_exam?= =?utf-8?q?ples_in_other_projects?= Message-ID: <3cBhqs6sBZz7M3M@mail.python.org> http://hg.python.org/peps/rev/ba6ad5dd0fba changeset: 5043:ba6ad5dd0fba user: Victor Stinner date: Sat Aug 10 01:00:07 2013 +0200 summary: PEP 446: Issues: add examples in other projects files: pep-0446.txt | 41 +++++++++++++++++++++++++++++++++++---- 1 files changed, 36 insertions(+), 5 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -128,7 +128,7 @@ processes are not noticed, because they don't cause major bugs. It does not mean that these bugs must not be fixed. -Two examples of common issues with inherited file descriptors: +Two common issues with inherited file descriptors: * On Windows, a directory cannot be removed before all file handles open in the directory are closed. The same issue can be seen with files, @@ -153,6 +153,30 @@ socket, a child process can accept new connections to read sensitive data. +Example of issues in open source projects: + +* `Mozilla (Firefox) `_: + open since 2002-05 +* `dbus library `_: + fixed in 2008-05 (`dbus commit + `_), + close file descriptors in the child process +* `autofs `_: + fixed in 2009-02, set the CLOEXEC flag +* `qemu `_: + fixed in 2009-12 (`qemu commit + `_), + set CLOEXEC flag +* `Tor `_: + fixed in 2010-12, set CLOEXEC flag +* `OCaml `_: open since + 2011-04, "PR#5256: Processes opened using Unix.open_process* inherit + all opened file descriptors (including sockets)" +* `?MQ `_: + open since 2012-08 +* `Squid `_: + open since 2012-07 + Issues fixed in the subprocess module ------------------------------------- @@ -327,8 +351,13 @@ ``/proc//fd/``, and so performances depends on the number of open file descriptors, not on MAXFD. -See also the `issue #1663329: subprocess close_fds perform poor if -SC_OPEN_MAX is high `_. +See also: + +* `Python issue #1663329 `_: + subprocess close_fds perform poor if ``SC_OPEN_MAX`` is high +* `Squid Bug #837033 `_: + Squid should set CLOEXEC on opened FDs. "32k+ close() calls in each + child process take a long time ([12-56] seconds) in Xen PV guests." 
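A minimal POSIX sketch of the subprocess behaviour summarised above, not part of the quoted diff: close_fds now defaults to True, and pass_fds is the explicit list of descriptors the child keeps (later versions should also mark them inheritable in the child)::

    import os
    import subprocess
    import sys

    r, w = os.pipe()
    child = subprocess.Popen(
        [sys.executable, "-c",
         "import os, sys; print(os.read(int(sys.argv[1]), 3))", str(r)],
        pass_fds=(r,))              # every other descriptor is closed in the child
    os.write(w, b"hi!")
    child.wait()
    os.close(r)
    os.close(w)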
Proposal @@ -425,8 +454,8 @@ descriptors non-inheritable by default: since Go 1.0 (2009), Perl 1.0 (1987) and Ruby 2.0 (2013). -The SCons project overrides builtin functions ``file()`` and ``open()`` -to make files non-inheritable on Windows: +The SCons project, written in Python, overrides builtin functions +``file()`` and ``open()`` to make files non-inheritable on Windows: see `win32.py `_. @@ -508,6 +537,8 @@ * `Ghosts of Unix past, part 2: Conflated designs `_ (Neil Brown, 2010) explains the history of ``O_CLOEXEC`` and ``O_NONBLOCK`` flags +* `File descriptor handling changes in 2.6.27 + `_ Copyright -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 10 01:05:15 2013 From: python-checkins at python.org (victor.stinner) Date: Sat, 10 Aug 2013 01:05:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_add_example_of_vul?= =?utf-8?q?nerabilities?= Message-ID: <3cBhqv1RYQz7M3L@mail.python.org> http://hg.python.org/peps/rev/e2f9feb6be35 changeset: 5044:e2f9feb6be35 user: Victor Stinner date: Sat Aug 10 00:51:14 2013 +0200 summary: PEP 446: add example of vulnerabilities files: pep-0446.txt | 46 ++++++++++++++++++++++++++++----------- 1 files changed, 33 insertions(+), 13 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -140,19 +140,6 @@ the server restarts while the program is not done: the server cannot start because the TCP port is still in use. -Leaking file descriptors is also a well known security vulnerability: -read -`FIO42-C. Ensure files are properly closed when they are no longer -needed -`_ -of the CERT. - -An untrusted child process can read sensitive data like passwords and -take control of the parent process though leaked file descriptors. It is -for example a way to escape from a chroot. With a leaked listening -socket, a child process can accept new connections to read sensitive -data. - Example of issues in open source projects: * `Mozilla (Firefox) `_: @@ -178,6 +165,39 @@ open since 2012-07 +Security Vulnerability +---------------------- + +Leaking file descriptors is also a well known security vulnerability: +read +`FIO42-C. Ensure files are properly closed when they are no longer +needed +`_ +of the CERT. + +An untrusted child process can read sensitive data like passwords and +take control of the parent process though leaked file descriptors. It is +for example a way to escape from a chroot. With a leaked listening +socket, a child process can accept new connections to read sensitive +data. 
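Setting the close-on-exec flag, as the projects listed above eventually did, prevents exactly this kind of leak; a small POSIX sketch, assuming os.O_CLOEXEC is available (Python 3.3+ on platforms that provide it)::

    import fcntl
    import os

    fd = os.open("/dev/null", os.O_RDONLY | os.O_CLOEXEC)
    flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    print(bool(flags & fcntl.FD_CLOEXEC))   # True: not leaked across exec()
    os.close(fd)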
+ +Example of vulnerabilities: + +* `Hijacking Apache https by mod_php + `_ (2003) + + * Apache: `Apr should set FD_CLOEXEC if APR_FOPEN_NOCLEANUP is not set + `_: + fixed in 2009 + * PHP: `system() (and similar) don't cleanup opened handles of Apache + `_: open since 2006 +* `CWE-403: Exposure of File Descriptor to Unintended Control Sphere + `_ (2008) +* `OpenSSH Security Advisory: portable-keysign-rand-helper.adv + `_ + (2011) + + Issues fixed in the subprocess module ------------------------------------- -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 10 01:12:08 2013 From: python-checkins at python.org (victor.stinner) Date: Sat, 10 Aug 2013 01:12:08 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_fix_typo?= Message-ID: <3cBhzr75d9z7M2g@mail.python.org> http://hg.python.org/peps/rev/8c5b39e616ac changeset: 5045:8c5b39e616ac user: Victor Stinner date: Sat Aug 10 01:12:01 2013 +0200 summary: PEP 446: fix typo files: pep-0446.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -546,7 +546,7 @@ `_ * `#17070: Use the new cloexec to improve security and avoid bugs `_ -* `#18571: Implementation of the PEP 446: non-inheriable file +* `#18571: Implementation of the PEP 446: non-inheritable file descriptors `_ Other links: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 10 01:35:19 2013 From: python-checkins at python.org (andrew.svetlov) Date: Sat, 10 Aug 2013 01:35:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Fix_markup?= Message-ID: <3cBjVb0JZhz7M5y@mail.python.org> http://hg.python.org/peps/rev/704d80be308a changeset: 5046:704d80be308a user: Andrew Svetlov date: Sat Aug 10 02:35:07 2013 +0300 summary: Fix markup files: pep-3156.txt | 14 +++++++------- 1 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pep-3156.txt b/pep-3156.txt --- a/pep-3156.txt +++ b/pep-3156.txt @@ -721,17 +721,17 @@ Signal callbacks '''''''''''''''' -- ``add_signal_handler(sig, callback, *args). Whenever signal ``sig`` - is received, arrange for ``callback(*args)`` to be called. +- ``add_signal_handler(sig, callback, *args)``. Whenever signal + ``sig`` is received, arrange for ``callback(*args)`` to be called. Specifying another callback for the same signal replaces the previous handler (only one handler can be active per signal). The ``sig`` must be a valid sigal number defined in the ``signal`` module. If the signal cannot be handled this raises an exception: - ``ValueError`` if it is not a valid signal or if it is an uncatchable - signale (e.g. ``SIGKILL``), ``RuntimeError`` if this particular event - loop instance cannot handle signals (since signals are global per - process, only an event loop associated with the main thread can - handle signals). + ``ValueError`` if it is not a valid signal or if it is an + uncatchable signale (e.g. ``SIGKILL``), ``RuntimeError`` if this + particular event loop instance cannot handle signals (since signals + are global per process, only an event loop associated with the main + thread can handle signals). - ``remove_signal_handler(sig)``. Removes the handler for signal ``sig``, if one is set. 
Raises the same exceptions as -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 10 03:58:21 2013 From: python-checkins at python.org (guido.van.rossum) Date: Sat, 10 Aug 2013 03:58:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Get_rid_of_Future=2Erunning?= =?utf-8?b?KCk7IHNlZSBidWcgMTg2OTku?= Message-ID: <3cBmgd1vrJz7M67@mail.python.org> http://hg.python.org/peps/rev/922c0fe6ebee changeset: 5047:922c0fe6ebee user: Guido van Rossum date: Fri Aug 09 17:26:53 2013 -0700 summary: Get rid of Future.running(); see bug 18699. files: pep-3156.txt | 8 +++----- 1 files changed, 3 insertions(+), 5 deletions(-) diff --git a/pep-3156.txt b/pep-3156.txt --- a/pep-3156.txt +++ b/pep-3156.txt @@ -728,7 +728,7 @@ ``sig`` must be a valid sigal number defined in the ``signal`` module. If the signal cannot be handled this raises an exception: ``ValueError`` if it is not a valid signal or if it is an - uncatchable signale (e.g. ``SIGKILL``), ``RuntimeError`` if this + uncatchable signal (e.g. ``SIGKILL``), ``RuntimeError`` if this particular event loop instance cannot handle signals (since signals are global per process, only an event loop associated with the main thread can handle signals). @@ -817,9 +817,6 @@ - ``cancelled()``. Returns ``True`` if the Future was cancelled. -- ``running()``. Always returns ``False``. Difference with PEP 3148: - there is no "running" state. - - ``done()``. Returns ``True`` if the Future is done. Note that a cancelled Future is considered done too (here and everywhere). @@ -861,7 +858,8 @@ callbacks. Difference with PEP 3148: This is a public API. The internal method ``set_running_or_notify_cancel()`` is not -supported; there is no way to set the running state. +supported; there is no way to set the running state. Likewise, +the method ``running()`` is not supported. The following exceptions are defined: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 10 03:58:22 2013 From: python-checkins at python.org (guido.van.rossum) Date: Sat, 10 Aug 2013 03:58:22 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Somewhat_rambling_changes_to_?= =?utf-8?q?event_loop_policy=2E?= Message-ID: <3cBmgf4gVFz7M7H@mail.python.org> http://hg.python.org/peps/rev/e6979b5e14fa changeset: 5048:e6979b5e14fa user: Guido van Rossum date: Fri Aug 09 17:27:42 2013 -0700 summary: Somewhat rambling changes to event loop policy. files: pep-3156.txt | 63 ++++++++++++++++++++------------------- 1 files changed, 33 insertions(+), 30 deletions(-) diff --git a/pep-3156.txt b/pep-3156.txt --- a/pep-3156.txt +++ b/pep-3156.txt @@ -165,50 +165,53 @@ ------------------------------------------------------------- Event loop management is controlled by an event loop policy, which is -a global (per-process) state. There is a default policy, and an API -to change the policy. The policy defines the notion of context; the -default policy's notion of context is defined as the current thread. +a global (per-process) object. There is a default policy, and an API +to change the policy. A policy defines the notion of context; a +policy manages a separate event loop per context. The default +policy's notion of context is defined as the current thread. Certain platforms or programming frameworks may change the default policy to something more suitable to the expectations of the users of that platform or framework. 
Such platforms or frameworks must document their policy and at what point during their initialization -sequence the policy is set. in order to avoid undefined behavior when +sequence the policy is set, in order to avoid undefined behavior when multiple active frameworks want to override the default policy. -An event loop policy may but does not have to enforce that there is -only one event loop in existence. The default event loop policy does -not enforce this, but it does enforce that there is only one event -loop per thread (as far as ``get_event_loop()`` is concerned). +To get the event loop for current context, use ``get_event_loop()``. +This returns an event loop object implementing the interface specified +below, or raises an exception in case no event loop has been set for +the current context and the current policy does not specify to create +one. It should never return ``None``. -To get the current event loop, use ``get_event_loop()``. This returns -an event loop object implementing the interface specified below, or -``None`` in case no current event loop has been set and the current -policy does not specify how to create one for the current context. It -is expected that ``get_event_loop()`` returns a different object -depending on the context, and the default policy will only create a -default event loop in the main thread; in other threads an event loop -must be explicitly set (but other policies may behave differently). -Event loop creation is lazy; i.e. the first call to -``get_event_loop()`` creates an event loop instance if necessary and -specified by the current policy. +To set the event loop for the current context, use +``set_event_loop(event_loop)``, where ``event_loop`` is an event loop +object. It is okay to set the current event loop to ``None``, in +which case subsequent calls to ``get_event_loop()`` will raise an +exception. This is useful for testing code that should not depend on +the existence of a default event loop. -To set the current event loop, use ``set_event_loop(event_loop)``, -where ``event_loop`` is an event loop object. It is allowed to set -the current event loop to ``None`` (although under the default policy, -if the main thread's current event loop is set to ``None``, and -``get_event_loop()`` is called subsequently, it will create a new -event loop instance. +It is expected that ``get_event_loop()`` returns a different event +loop object depending on the context (in fact, this is the definition +of context). It may create a new event loop object if none is set and +creation is allowed by the policy. The default policy will create a +new event loop only in the main thread, and only if +``get_event_loop()`` is called before ``set_event_loop()`` is ever +called. (To reset this state, reset the policy.) In other threads an +event loop must be explicitly set. Other policies may behave +differently. Event loop by the default policy creation is lazy; +i.e. the first call to ``get_event_loop()`` creates an event loop +instance if necessary and specified by the current policy. For the benefit of unit tests and other special cases there's a third policy function: ``new_event_loop()``, which creates and returns a new event loop object according to the policy's default rules. To make -this the current event loop, you must call ``set_event_loop()``. +this the current event loop, you must call ``set_event_loop()`` with +it. -To change the event loop policy, call -``set_event_loop_policy(policy)``, where ``policy`` is an event loop -policy object or ``None``. 
The policy object must be an object that -has methods ``get_event_loop()``, ``set_event_loop(loop)`` and +To change the event loop policy, +call ``set_event_loop_policy(policy)``, where ``policy`` is an event +loop policy object or ``None``. The policy object must be an object +that has methods ``get_event_loop()``, ``set_event_loop(loop)`` and ``new_event_loop()``, all behaving like the functions described above. Passing a policy value of ``None`` restores the default event loop policy (overriding the alternate default set by the platform or -- Repository URL: http://hg.python.org/peps From solipsis at pitrou.net Sat Aug 10 05:45:57 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 10 Aug 2013 05:45:57 +0200 Subject: [Python-checkins] Daily reference leaks (b9a5b7e3b32f): sum=0 Message-ID: results for b9a5b7e3b32f on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/refloggAOdG_', '-x'] From python-checkins at python.org Sat Aug 10 14:58:46 2013 From: python-checkins at python.org (eli.bendersky) Date: Sat, 10 Aug 2013 14:58:46 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NjY4?= =?utf-8?q?=3A_Further_clarify_m=5Fsize_setting_for_non-negative_values?= Message-ID: <3cC3Kf4zjKz7LmJ@mail.python.org> http://hg.python.org/cpython/rev/f55ba27776d4 changeset: 85079:f55ba27776d4 branch: 3.3 parent: 85077:8fbaf4e649af user: Eli Bendersky date: Sat Aug 10 05:57:27 2013 -0700 summary: Issue #18668: Further clarify m_size setting for non-negative values files: Doc/c-api/module.rst | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -191,10 +191,10 @@ freed when the module object is deallocated, after the :c:member:`m_free` function has been called, if present. - Setting ``m_size`` to a positive value specifies the size of the additional - memory required by the module. Setting it to ``-1`` means that the module can - not be re-initialized because it has global state. Setting it to ``0`` is - forbidden. + Setting ``m_size`` to ``-1`` means that the module can not be + re-initialized because it has global state. Setting it to a non-negative + value means that the module can be re-initialized and specifies the + additional amount of memory it requires for its state. See :PEP:`3121` for more details. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 14:58:47 2013 From: python-checkins at python.org (eli.bendersky) Date: Sat, 10 Aug 2013 14:58:47 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318668=3A_Further_clarify_m=5Fsize_setting_for_n?= =?utf-8?q?on-negative_values?= Message-ID: <3cC3Kg6rl4z7M9T@mail.python.org> http://hg.python.org/cpython/rev/d43435e82e21 changeset: 85080:d43435e82e21 parent: 85078:b9a5b7e3b32f parent: 85079:f55ba27776d4 user: Eli Bendersky date: Sat Aug 10 05:58:10 2013 -0700 summary: Issue #18668: Further clarify m_size setting for non-negative values files: Doc/c-api/module.rst | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -198,10 +198,10 @@ freed when the module object is deallocated, after the :c:member:`m_free` function has been called, if present. 
- Setting ``m_size`` to a positive value specifies the size of the additional - memory required by the module. Setting it to ``-1`` means that the module can - not be re-initialized because it has global state. Setting it to ``0`` is - forbidden. + Setting ``m_size`` to ``-1`` means that the module can not be + re-initialized because it has global state. Setting it to a non-negative + value means that the module can be re-initialized and specifies the + additional amount of memory it requires for its state. See :PEP:`3121` for more details. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 16:36:28 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 10 Aug 2013 16:36:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2316400=3A_Add_comm?= =?utf-8?q?and_line_option_for_isolated_mode=2E?= Message-ID: <3cC5VN4XqGz7LqW@mail.python.org> http://hg.python.org/cpython/rev/dd0d751cc7f1 changeset: 85081:dd0d751cc7f1 user: Christian Heimes date: Sat Aug 10 16:36:18 2013 +0200 summary: Issue #16400: Add command line option for isolated mode. -I Run Python in isolated mode. This also implies -E and -s. In isolated mode sys.path contains neither the script?s directory nor the user?s site-packages directory. All PYTHON* environment variables are ignored, too. Further restrictions may be imposed to prevent the user from injecting malicious code. files: Doc/c-api/init.rst | 6 +++++- Doc/using/cmdline.rst | 17 +++++++++++++++-- Doc/whatsnew/3.4.rst | 3 ++- Include/pydebug.h | 1 + Lib/test/test_cmd_line.py | 26 ++++++++++++++++++++++++++ Lib/test/test_sys.py | 2 +- Misc/NEWS | 2 ++ Misc/python.man | 14 ++++++++++++-- Modules/main.c | 9 ++++++++- Python/pythonrun.c | 1 + Python/sysmodule.c | 6 ++++-- 11 files changed, 77 insertions(+), 10 deletions(-) diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -329,7 +329,11 @@ .. c:function:: void PySys_SetArgv(int argc, wchar_t **argv) - This function works like :c:func:`PySys_SetArgvEx` with *updatepath* set to 1. + This function works like :c:func:`PySys_SetArgvEx` with *updatepath* set + to 1 unless the :program:`python` interpreter was started with the + :option:`-I`. + + .. versionchanged:: 3.4 The *updatepath* value depends on :option:`-I`. .. c:function:: void Py_SetPythonHome(wchar_t *home) diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -24,7 +24,7 @@ When invoking Python, you may specify any of these options:: - python [-bBdEhiOqsSuvVWx?] [-c command | -m module-name | script | - ] [args] + python [-bBdEhiIOqsSuvVWx?] [-c command | -m module-name | script | - ] [args] The most common use case is, of course, a simple invocation of a script:: @@ -175,6 +175,8 @@ Python 3.0 +.. _using-on-misc-options: + Miscellaneous options ~~~~~~~~~~~~~~~~~~~~~ @@ -213,6 +215,17 @@ raises an exception. See also :envvar:`PYTHONINSPECT`. +.. cmdoption:: -I + + Run Python in isolated mode. This also implies -E and -s. + In isolated mode :data:`sys.path` contains neither the script's directory nor + the user's site-packages directory. All :envvar:`PYTHON*` environment + variables are ignored, too. Further restrictions may be imposed to prevent + the user from injecting malicious code. + + .. versionadded:: 3.4 + + .. cmdoption:: -O Turn on basic optimizations. 
This changes the filename extension for @@ -398,7 +411,7 @@ --------------------- These environment variables influence Python's behavior, they are processed -before the command-line switches other than -E. It is customary that +before the command-line switches other than -E or -I. It is customary that command-line switches override environmental variables where there is a conflict. diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -107,7 +107,8 @@ Security improvements: -* None yet. +* command line option for :ref:`isolated mode `, + :issue:`16499`. Please read on for a comprehensive list of user-facing changes. diff --git a/Include/pydebug.h b/Include/pydebug.h --- a/Include/pydebug.h +++ b/Include/pydebug.h @@ -20,6 +20,7 @@ PyAPI_DATA(int) Py_NoUserSiteDirectory; PyAPI_DATA(int) Py_UnbufferedStdioFlag; PyAPI_DATA(int) Py_HashRandomizationFlag; +PyAPI_DATA(int) Py_IsolatedFlag; /* this is a wrapper around getenv() that pays attention to Py_IgnoreEnvironmentFlag. It should be used for getting variables like diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -4,6 +4,7 @@ import test.support, unittest import os +import shutil import sys import subprocess import tempfile @@ -439,6 +440,31 @@ self.assertEqual(b'', out) + def test_isolatedmode(self): + self.verify_valid_flag('-I') + self.verify_valid_flag('-IEs') + rc, out, err = assert_python_ok('-I', '-c', + 'from sys import flags as f; ' + 'print(f.no_user_site, f.ignore_environment, f.isolated)', + # dummyvar to prevent extranous -E + dummyvar="") + self.assertEqual(out.strip(), b'1 1 1') + with test.support.temp_cwd() as tmpdir: + fake = os.path.join(tmpdir, "uuid.py") + main = os.path.join(tmpdir, "main.py") + with open(fake, "w") as f: + f.write("raise RuntimeError('isolated mode test')\n") + with open(main, "w") as f: + f.write("import uuid\n") + f.write("print('ok')\n") + self.assertRaises(subprocess.CalledProcessError, + subprocess.check_output, + [sys.executable, main], cwd=tmpdir, + stderr=subprocess.DEVNULL) + out = subprocess.check_output([sys.executable, "-I", main], + cwd=tmpdir) + self.assertEqual(out.strip(), b"ok") + def test_main(): test.support.run_unittest(CmdLineTest) test.support.reap_children() diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -515,7 +515,7 @@ attrs = ("debug", "inspect", "interactive", "optimize", "dont_write_bytecode", "no_user_site", "no_site", "ignore_environment", "verbose", - "bytes_warning", "quiet", "hash_randomization") + "bytes_warning", "quiet", "hash_randomization", "isolated") for attr in attrs: self.assertTrue(hasattr(sys.flags, attr), attr) self.assertEqual(type(getattr(sys.flags, attr)), int, attr) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #16400: Add command line option for isolated mode. + - Issue #15301: Parsing fd, uid, and gid parameters for builtins in Modules/posixmodule.c is now far more robust. diff --git a/Misc/python.man b/Misc/python.man --- a/Misc/python.man +++ b/Misc/python.man @@ -26,11 +26,14 @@ .B \-i ] [ +.B \-I +] +.br + [ .B \-m .I module-name ] -.br - [ +[ .B \-q ] [ @@ -139,6 +142,13 @@ useful to inspect global variables or a stack trace when a script raises an exception. .TP +.B \-I +Run Python in isolated mode. This also implies \fB\-E\fP and \fB\-S\fP. 
In +isolated mode sys.path contains neither the script?s directory nor the user?s +site-packages directory. All PYTHON* environment variables are ignored, too. +Further restrictions may be imposed to prevent the user from injecting +malicious code. +.TP .BI "\-m " module-name Searches .I sys.path diff --git a/Modules/main.c b/Modules/main.c --- a/Modules/main.c +++ b/Modules/main.c @@ -43,7 +43,7 @@ static int orig_argc; /* command line options */ -#define BASE_OPTS L"bBc:dEhiJm:OqRsStuvVW:xX:?" +#define BASE_OPTS L"bBc:dEhiIJm:OqRsStuvVW:xX:?" #define PROGRAM_OPTS BASE_OPTS @@ -65,6 +65,7 @@ static char *usage_2 = "\ -i : inspect interactively after running script; forces a prompt even\n\ if stdin does not appear to be a terminal; also PYTHONINSPECT=x\n\ +-I : isolate Python from the user's environment (implies -E and -s)\n\ -m mod : run library module as a script (terminates option list)\n\ -O : optimize generated bytecode slightly; also PYTHONOPTIMIZE=x\n\ -OO : remove doc-strings in addition to the -O optimizations\n\ @@ -426,6 +427,12 @@ Py_InteractiveFlag++; break; + case 'I': + Py_IsolatedFlag++; + Py_NoUserSiteDirectory++; + Py_IgnoreEnvironmentFlag++; + break; + /* case 'J': reserved for Jython */ case 'O': diff --git a/Python/pythonrun.c b/Python/pythonrun.c --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -112,6 +112,7 @@ int Py_NoUserSiteDirectory = 0; /* for -s and site.py */ int Py_UnbufferedStdioFlag = 0; /* Unbuffered binary std{in,out,err} */ int Py_HashRandomizationFlag = 0; /* for -R and PYTHONHASHSEED */ +int Py_IsolatedFlag = 0; /* for -I, isolate from user's env */ PyThreadState *_Py_Finalizing = NULL; diff --git a/Python/sysmodule.c b/Python/sysmodule.c --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -1369,6 +1369,7 @@ {"bytes_warning", "-b"}, {"quiet", "-q"}, {"hash_randomization", "-R"}, + {"isolated", "-I"}, {0} }; @@ -1376,7 +1377,7 @@ "sys.flags", /* name */ flags__doc__, /* doc */ flags_fields, /* fields */ - 12 + 13 }; static PyObject* @@ -1406,6 +1407,7 @@ SetFlag(Py_BytesWarningFlag); SetFlag(Py_QuietFlag); SetFlag(Py_HashRandomizationFlag); + SetFlag(Py_IsolatedFlag); #undef SetFlag if (PyErr_Occurred()) { @@ -1944,7 +1946,7 @@ void PySys_SetArgv(int argc, wchar_t **argv) { - PySys_SetArgvEx(argc, argv, 1); + PySys_SetArgvEx(argc, argv, Py_IsolatedFlag == 0); } /* Reimplementation of PyFile_WriteString() no calling indirectly -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 16:38:33 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 10 Aug 2013 16:38:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_typo=2C_changeset_dd0d751c?= =?utf-8?q?c7f1_belongs_to_issue_=2316499_not_issue_=2316400?= Message-ID: <3cC5Xn1Ywrz7LtY@mail.python.org> http://hg.python.org/cpython/rev/06c39789061e changeset: 85082:06c39789061e user: Christian Heimes date: Sat Aug 10 16:38:23 2013 +0200 summary: typo, changeset dd0d751cc7f1 belongs to issue #16499 not issue #16400 files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,7 +10,7 @@ Core and Builtins ----------------- -- Issue #16400: Add command line option for isolated mode. +- Issue #16499: Add command line option for isolated mode. - Issue #15301: Parsing fd, uid, and gid parameters for builtins in Modules/posixmodule.c is now far more robust. 
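A quick way to exercise the new option (assuming a 3.4+ interpreter that contains this changeset): -I implies -E and -s, so all three flags are reported as set::

    import subprocess
    import sys

    out = subprocess.check_output([
        sys.executable, "-I", "-c",
        "import sys; print(sys.flags.no_user_site, "
        "sys.flags.ignore_environment, sys.flags.isolated)"])
    print(out.strip())                 # b'1 1 1'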
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:01:22 2013 From: python-checkins at python.org (eli.bendersky) Date: Sat, 10 Aug 2013 17:01:22 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2315651=3A_PEP_3121?= =?utf-8?q?_refactoring_for_=5Felementtree?= Message-ID: <3cC6362GGKzNQC@mail.python.org> http://hg.python.org/cpython/rev/8a060e2de608 changeset: 85083:8a060e2de608 user: Eli Bendersky date: Sat Aug 10 08:00:39 2013 -0700 summary: Issue #15651: PEP 3121 refactoring for _elementtree Patch by Antoine Pitrou (based on Robin Schreiber's original patch) files: Modules/_elementtree.c | 121 +++++++++++++++++++++------- 1 files changed, 90 insertions(+), 31 deletions(-) diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -66,10 +66,51 @@ static PyTypeObject XMLParser_Type; -/* glue functions (see the init function for details) */ -static PyObject* elementtree_parseerror_obj; -static PyObject* elementtree_deepcopy_obj; -static PyObject* elementpath_obj; +/* Per-module state; PEP 3121 */ +typedef struct { + PyObject *parseerror_obj; + PyObject *deepcopy_obj; + PyObject *elementpath_obj; +} elementtreestate; + +static struct PyModuleDef elementtreemodule; + +/* Given a module object (assumed to be _elementtree), get its per-module + * state. + */ +#define ET_STATE(mod) ((elementtreestate *) PyModule_GetState(mod)) + +/* Find the module instance imported in the currently running sub-interpreter + * and get its state. + */ +#define ET_STATE_GLOBAL \ + ((elementtreestate *) PyModule_GetState(PyState_FindModule(&elementtreemodule))) + +static int +elementtree_clear(PyObject *m) +{ + elementtreestate *st = ET_STATE(m); + Py_CLEAR(st->parseerror_obj); + Py_CLEAR(st->deepcopy_obj); + Py_CLEAR(st->elementpath_obj); + return 0; +} + +static int +elementtree_traverse(PyObject *m, visitproc visit, void *arg) +{ + elementtreestate *st = ET_STATE(m); + Py_VISIT(st->parseerror_obj); + Py_VISIT(st->deepcopy_obj); + Py_VISIT(st->elementpath_obj); + return 0; +} + +static void +elementtree_free(void *m) +{ + elementtree_clear((PyObject *)m); +} /* helpers */ @@ -77,11 +118,11 @@ deepcopy(PyObject* object, PyObject* memo) { /* do a deep copy of the given object */ - PyObject* args; PyObject* result; - - if (!elementtree_deepcopy_obj) { + elementtreestate *st = ET_STATE_GLOBAL; + + if (!st->deepcopy_obj) { PyErr_SetString( PyExc_RuntimeError, "deepcopy helper not found" @@ -92,7 +133,7 @@ args = PyTuple_Pack(2, object, memo); if (!args) return NULL; - result = PyObject_CallObject(elementtree_deepcopy_obj, args); + result = PyObject_CallObject(st->deepcopy_obj, args); Py_DECREF(args); return result; } @@ -1047,6 +1088,7 @@ PyObject* tag; PyObject* namespaces = Py_None; static char *kwlist[] = {"path", "namespaces", 0}; + elementtreestate *st = ET_STATE_GLOBAL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:find", kwlist, &tag, &namespaces)) @@ -1055,7 +1097,7 @@ if (checkpath(tag) || namespaces != Py_None) { _Py_IDENTIFIER(find); return _PyObject_CallMethodId( - elementpath_obj, &PyId_find, "OOO", self, tag, namespaces + st->elementpath_obj, &PyId_find, "OOO", self, tag, namespaces ); } @@ -1083,6 +1125,7 @@ PyObject* namespaces = Py_None; _Py_IDENTIFIER(findtext); static char *kwlist[] = {"path", "default", "namespaces", 0}; + elementtreestate *st = ET_STATE_GLOBAL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|OO:findtext", kwlist, &tag, &default_value, &namespaces)) 
@@ -1090,7 +1133,7 @@ if (checkpath(tag) || namespaces != Py_None) return _PyObject_CallMethodId( - elementpath_obj, &PyId_findtext, "OOOO", self, tag, default_value, namespaces + st->elementpath_obj, &PyId_findtext, "OOOO", self, tag, default_value, namespaces ); if (!self->extra) { @@ -1122,6 +1165,7 @@ PyObject* tag; PyObject* namespaces = Py_None; static char *kwlist[] = {"path", "namespaces", 0}; + elementtreestate *st = ET_STATE_GLOBAL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:findall", kwlist, &tag, &namespaces)) @@ -1130,7 +1174,7 @@ if (checkpath(tag) || namespaces != Py_None) { _Py_IDENTIFIER(findall); return _PyObject_CallMethodId( - elementpath_obj, &PyId_findall, "OOO", self, tag, namespaces + st->elementpath_obj, &PyId_findall, "OOO", self, tag, namespaces ); } @@ -1162,13 +1206,14 @@ PyObject* namespaces = Py_None; _Py_IDENTIFIER(iterfind); static char *kwlist[] = {"path", "namespaces", 0}; + elementtreestate *st = ET_STATE_GLOBAL; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:iterfind", kwlist, &tag, &namespaces)) return NULL; return _PyObject_CallMethodId( - elementpath_obj, &PyId_iterfind, "OOO", self, tag, namespaces + st->elementpath_obj, &PyId_iterfind, "OOO", self, tag, namespaces ); } @@ -2351,6 +2396,7 @@ { PyObject* node; PyObject* this; + elementtreestate *st = ET_STATE_GLOBAL; if (self->data) { if (self->this == self->last) { @@ -2381,7 +2427,7 @@ } else { if (self->root) { PyErr_SetString( - elementtree_parseerror_obj, + st->parseerror_obj, "multiple elements on top level" ); goto error; @@ -2670,6 +2716,10 @@ #include "expat.h" #include "pyexpat.h" + +/* The PyExpat_CAPI structure is an immutable dispatch table, so it can be + * cached globally without being in per-module state. + */ static struct PyExpat_CAPI *expat_capi; #define EXPAT(func) (expat_capi->func) @@ -2779,6 +2829,7 @@ expat_set_error(enum XML_Error error_code, int line, int column, char *message) { PyObject *errmsg, *error, *position, *code; + elementtreestate *st = ET_STATE_GLOBAL; errmsg = PyUnicode_FromFormat("%s: line %d, column %d", message ? 
message : EXPAT(ErrorString)(error_code), @@ -2786,7 +2837,7 @@ if (errmsg == NULL) return; - error = PyObject_CallFunction(elementtree_parseerror_obj, "O", errmsg); + error = PyObject_CallFunction(st->parseerror_obj, "O", errmsg); Py_DECREF(errmsg); if (!error) return; @@ -2816,7 +2867,7 @@ } Py_DECREF(position); - PyErr_SetObject(elementtree_parseerror_obj, error); + PyErr_SetObject(st->parseerror_obj, error); Py_DECREF(error); } @@ -3639,22 +3690,29 @@ }; -static struct PyModuleDef _elementtreemodule = { - PyModuleDef_HEAD_INIT, - "_elementtree", - NULL, - -1, - _functions, - NULL, - NULL, - NULL, - NULL +static struct PyModuleDef elementtreemodule = { + PyModuleDef_HEAD_INIT, + "_elementtree", + NULL, + sizeof(elementtreestate), + _functions, + NULL, + elementtree_traverse, + elementtree_clear, + elementtree_free }; PyMODINIT_FUNC PyInit__elementtree(void) { PyObject *m, *temp; + elementtreestate *st; + + m = PyState_FindModule(&elementtreemodule); + if (m) { + Py_INCREF(m); + return m; + } /* Initialize object types */ if (PyType_Ready(&ElementIter_Type) < 0) @@ -3666,16 +3724,17 @@ if (PyType_Ready(&XMLParser_Type) < 0) return NULL; - m = PyModule_Create(&_elementtreemodule); + m = PyModule_Create(&elementtreemodule); if (!m) return NULL; + st = ET_STATE(m); if (!(temp = PyImport_ImportModule("copy"))) return NULL; - elementtree_deepcopy_obj = PyObject_GetAttrString(temp, "deepcopy"); + st->deepcopy_obj = PyObject_GetAttrString(temp, "deepcopy"); Py_XDECREF(temp); - if (!(elementpath_obj = PyImport_ImportModule("xml.etree.ElementPath"))) + if (!(st->elementpath_obj = PyImport_ImportModule("xml.etree.ElementPath"))) return NULL; /* link against pyexpat */ @@ -3695,11 +3754,11 @@ return NULL; } - elementtree_parseerror_obj = PyErr_NewException( + st->parseerror_obj = PyErr_NewException( "xml.etree.ElementTree.ParseError", PyExc_SyntaxError, NULL ); - Py_INCREF(elementtree_parseerror_obj); - PyModule_AddObject(m, "ParseError", elementtree_parseerror_obj); + Py_INCREF(st->parseerror_obj); + PyModule_AddObject(m, "ParseError", st->parseerror_obj); Py_INCREF((PyObject *)&Element_Type); PyModule_AddObject(m, "Element", (PyObject *)&Element_Type); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:09:12 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:09:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NDgzOiBhZGQg?= =?utf-8?q?one_more_date_format_in_test=5Fhttp2time=5Fformats=2E__Patch_by?= =?utf-8?q?_Vajrasky?= Message-ID: <3cC6D86ssKzNcC@mail.python.org> http://hg.python.org/cpython/rev/ab8da1936297 changeset: 85084:ab8da1936297 branch: 3.3 parent: 85079:f55ba27776d4 user: Ezio Melotti date: Sat Aug 10 18:07:25 2013 +0300 summary: #18483: add one more date format in test_http2time_formats. Patch by Vajrasky Kok. 
files: Lib/test/test_http_cookiejar.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -56,6 +56,8 @@ '03-Feb-1994 00:00:00 GMT', # broken rfc850 (no weekday) '03-Feb-1994 00:00 GMT', # broken rfc850 (no weekday, no seconds) '03-Feb-1994 00:00', # broken rfc850 (no weekday, no seconds, no tz) + '02-Feb-1994 24:00', # broken rfc850 (no weekday, no seconds, + # no tz) using hour 24 with yesterday date '03-Feb-94', # old rfc850 HTTP format (no weekday, no time) '03-Feb-1994', # broken rfc850 HTTP format (no weekday, no time) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:09:14 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:09:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NDgzOiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cC6DB1nYkzNxJ@mail.python.org> http://hg.python.org/cpython/rev/5c3708f23351 changeset: 85085:5c3708f23351 parent: 85083:8a060e2de608 parent: 85084:ab8da1936297 user: Ezio Melotti date: Sat Aug 10 18:08:13 2013 +0300 summary: #18483: merge with 3.3. files: Lib/test/test_http_cookiejar.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -56,6 +56,8 @@ '03-Feb-1994 00:00:00 GMT', # broken rfc850 (no weekday) '03-Feb-1994 00:00 GMT', # broken rfc850 (no weekday, no seconds) '03-Feb-1994 00:00', # broken rfc850 (no weekday, no seconds, no tz) + '02-Feb-1994 24:00', # broken rfc850 (no weekday, no seconds, + # no tz) using hour 24 with yesterday date '03-Feb-94', # old rfc850 HTTP format (no weekday, no time) '03-Feb-1994', # broken rfc850 HTTP format (no weekday, no time) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:21:21 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:21:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NDg0OiBpbXBy?= =?utf-8?q?ove_test_coverage_of_http=2Ecookiejar=2E__Patch_by_Vajrasky_Kok?= =?utf-8?q?=2E?= Message-ID: <3cC6V92hcpzPlK@mail.python.org> http://hg.python.org/cpython/rev/fe5d105eba4b changeset: 85086:fe5d105eba4b branch: 3.3 parent: 85084:ab8da1936297 user: Ezio Melotti date: Sat Aug 10 18:20:09 2013 +0300 summary: #18484: improve test coverage of http.cookiejar. Patch by Vajrasky Kok. 
files: Lib/test/test_http_cookiejar.py | 73 +++++++++++++++++++- 1 files changed, 67 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -7,7 +7,7 @@ import unittest import urllib.request -from http.cookiejar import (time2isoz, http2time, time2netscape, +from http.cookiejar import (time2isoz, http2time, iso2time, time2netscape, parse_ns_headers, join_header_words, split_header_words, Cookie, CookieJar, DefaultCookiePolicy, LWPCookieJar, MozillaCookieJar, LoadError, lwp_cookie_str, DEFAULT_HTTP_PORT, escape_path, @@ -80,7 +80,7 @@ t3 = http2time(s.upper()) self.assertTrue(t == t2 == t3 == test_t, - "'%s' => %s, %s, %s (%s)" % (s, t, t2, t3, test_t)) + "'%s' => %s, %s, %s (%s)" % (s, t, t2, t3, test_t)) def test_http2time_garbage(self): for test in [ @@ -95,10 +95,71 @@ '01-01-1980 00:61:00', '01-01-1980 00:00:62', ]: - self.assertTrue(http2time(test) is None, - "http2time(%s) is not None\n" - "http2time(test) %s" % (test, http2time(test)) - ) + self.assertIsNone(http2time(test), + "http2time(%s) is not None\n" + "http2time(test) %s" % (test, http2time(test))) + + def test_iso2time(self): + def parse_date(text): + return time.gmtime(iso2time(text))[:6] + + # ISO 8601 compact format + self.assertEqual(parse_date("19940203T141529Z"), + (1994, 2, 3, 14, 15, 29)) + + # ISO 8601 with time behind UTC + self.assertEqual(parse_date("1994-02-03 07:15:29 -0700"), + (1994, 2, 3, 14, 15, 29)) + + # ISO 8601 with time ahead of UTC + self.assertEqual(parse_date("1994-02-03 19:45:29 +0530"), + (1994, 2, 3, 14, 15, 29)) + + def test_iso2time_formats(self): + # test iso2time for supported dates. + tests = [ + '1994-02-03 00:00:00 -0000', # ISO 8601 format + '1994-02-03 00:00:00 +0000', # ISO 8601 format + '1994-02-03 00:00:00', # zone is optional + '1994-02-03', # only date + '1994-02-03T00:00:00', # Use T as separator + '19940203', # only date + '1994-02-02 24:00:00', # using hour-24 yesterday date + '19940203T000000Z', # ISO 8601 compact format + + # A few tests with extra space at various places + ' 1994-02-03 ', + ' 1994-02-03T00:00:00 ', + ] + + test_t = 760233600 # assume broken POSIX counting of seconds + for s in tests: + t = iso2time(s) + t2 = iso2time(s.lower()) + t3 = iso2time(s.upper()) + + self.assertTrue(t == t2 == t3 == test_t, + "'%s' => %s, %s, %s (%s)" % (s, t, t2, t3, test_t)) + + def test_iso2time_garbage(self): + for test in [ + '', + 'Garbage', + 'Thursday, 03-Feb-94 00:00:00 GMT', + '1980-00-01', + '1980-13-01', + '1980-01-00', + '1980-01-32', + '1980-01-01 25:00:00', + '1980-01-01 00:61:00', + '01-01-1980 00:00:62', + '01-01-1980T00:00:62', + '19800101T250000Z' + '1980-01-01 00:00:00 -2500', + ]: + self.assertIsNone(iso2time(test), + "iso2time(%s) is not None\n" + "iso2time(test) %s" % (test, iso2time(test))) class HeaderTests(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:21:22 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:21:22 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NDg0OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cC6VB5wVDzQLG@mail.python.org> http://hg.python.org/cpython/rev/7bf1f8892df5 changeset: 85087:7bf1f8892df5 parent: 85085:5c3708f23351 parent: 85086:fe5d105eba4b user: Ezio Melotti date: Sat Aug 10 18:20:59 2013 +0300 summary: #18484: merge with 3.3. 
files: Lib/test/test_http_cookiejar.py | 73 +++++++++++++++++++- 1 files changed, 67 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -7,7 +7,7 @@ import unittest import urllib.request -from http.cookiejar import (time2isoz, http2time, time2netscape, +from http.cookiejar import (time2isoz, http2time, iso2time, time2netscape, parse_ns_headers, join_header_words, split_header_words, Cookie, CookieJar, DefaultCookiePolicy, LWPCookieJar, MozillaCookieJar, LoadError, lwp_cookie_str, DEFAULT_HTTP_PORT, escape_path, @@ -80,7 +80,7 @@ t3 = http2time(s.upper()) self.assertTrue(t == t2 == t3 == test_t, - "'%s' => %s, %s, %s (%s)" % (s, t, t2, t3, test_t)) + "'%s' => %s, %s, %s (%s)" % (s, t, t2, t3, test_t)) def test_http2time_garbage(self): for test in [ @@ -95,10 +95,71 @@ '01-01-1980 00:61:00', '01-01-1980 00:00:62', ]: - self.assertTrue(http2time(test) is None, - "http2time(%s) is not None\n" - "http2time(test) %s" % (test, http2time(test)) - ) + self.assertIsNone(http2time(test), + "http2time(%s) is not None\n" + "http2time(test) %s" % (test, http2time(test))) + + def test_iso2time(self): + def parse_date(text): + return time.gmtime(iso2time(text))[:6] + + # ISO 8601 compact format + self.assertEqual(parse_date("19940203T141529Z"), + (1994, 2, 3, 14, 15, 29)) + + # ISO 8601 with time behind UTC + self.assertEqual(parse_date("1994-02-03 07:15:29 -0700"), + (1994, 2, 3, 14, 15, 29)) + + # ISO 8601 with time ahead of UTC + self.assertEqual(parse_date("1994-02-03 19:45:29 +0530"), + (1994, 2, 3, 14, 15, 29)) + + def test_iso2time_formats(self): + # test iso2time for supported dates. + tests = [ + '1994-02-03 00:00:00 -0000', # ISO 8601 format + '1994-02-03 00:00:00 +0000', # ISO 8601 format + '1994-02-03 00:00:00', # zone is optional + '1994-02-03', # only date + '1994-02-03T00:00:00', # Use T as separator + '19940203', # only date + '1994-02-02 24:00:00', # using hour-24 yesterday date + '19940203T000000Z', # ISO 8601 compact format + + # A few tests with extra space at various places + ' 1994-02-03 ', + ' 1994-02-03T00:00:00 ', + ] + + test_t = 760233600 # assume broken POSIX counting of seconds + for s in tests: + t = iso2time(s) + t2 = iso2time(s.lower()) + t3 = iso2time(s.upper()) + + self.assertTrue(t == t2 == t3 == test_t, + "'%s' => %s, %s, %s (%s)" % (s, t, t2, t3, test_t)) + + def test_iso2time_garbage(self): + for test in [ + '', + 'Garbage', + 'Thursday, 03-Feb-94 00:00:00 GMT', + '1980-00-01', + '1980-13-01', + '1980-01-00', + '1980-01-32', + '1980-01-01 25:00:00', + '1980-01-01 00:61:00', + '01-01-1980 00:00:62', + '01-01-1980T00:00:62', + '19800101T250000Z' + '1980-01-01 00:00:00 -2500', + ]: + self.assertIsNone(iso2time(test), + "iso2time(%s) is not None\n" + "iso2time(test) %s" % (test, iso2time(test))) class HeaderTests(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:31:49 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:31:49 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NDY1OiBmaXgg?= =?utf-8?q?unused_variables_in_test=5Fminidom=2E__Patch_by_Vajrasky_Kok=2E?= Message-ID: <3cC6kF694BzPdS@mail.python.org> http://hg.python.org/cpython/rev/4daa18b5ad49 changeset: 85088:4daa18b5ad49 branch: 3.3 parent: 85086:fe5d105eba4b user: Ezio Melotti date: Sat Aug 10 18:30:29 2013 +0300 summary: #18465: fix unused variables in 
test_minidom. Patch by Vajrasky Kok. files: Lib/test/test_minidom.py | 11 ++++++----- 1 files changed, 6 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py --- a/Lib/test/test_minidom.py +++ b/Lib/test/test_minidom.py @@ -1,7 +1,7 @@ # test for xml.dom.minidom import pickle -from test.support import verbose, run_unittest, findfile +from test.support import run_unittest, findfile import unittest import xml.dom.minidom @@ -310,9 +310,10 @@ self.confirm(len(child.attributes) == 0 and child.getAttributeNode("spam") is None) dom2 = Document() - child2 = dom2.appendChild(dom.createElement("foo")) - self.assertRaises(xml.dom.NotFoundErr, child.removeAttributeNode, - node) + child2 = dom2.appendChild(dom2.createElement("foo")) + node2 = child2.getAttributeNode("spam") + self.assertRaises(xml.dom.NotFoundErr, child2.removeAttributeNode, + node2) dom.unlink() def testHasAttribute(self): @@ -607,7 +608,7 @@ def testHasChildNodes(self): dom = parseString("") doc = dom.documentElement - self.assertTrue(dom.hasChildNodes()) + self.assertTrue(doc.hasChildNodes()) dom2 = parseString("") doc2 = dom2.documentElement self.assertFalse(doc2.hasChildNodes()) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:31:51 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:31:51 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NDY1OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cC6kH105HzQJr@mail.python.org> http://hg.python.org/cpython/rev/47770b408321 changeset: 85089:47770b408321 parent: 85087:7bf1f8892df5 parent: 85088:4daa18b5ad49 user: Ezio Melotti date: Sat Aug 10 18:30:57 2013 +0300 summary: #18465: merge with 3.3. 
files: Lib/test/test_minidom.py | 11 ++++++----- 1 files changed, 6 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py --- a/Lib/test/test_minidom.py +++ b/Lib/test/test_minidom.py @@ -1,7 +1,7 @@ # test for xml.dom.minidom import pickle -from test.support import verbose, run_unittest, findfile +from test.support import run_unittest, findfile import unittest import xml.dom.minidom @@ -310,9 +310,10 @@ self.confirm(len(child.attributes) == 0 and child.getAttributeNode("spam") is None) dom2 = Document() - child2 = dom2.appendChild(dom.createElement("foo")) - self.assertRaises(xml.dom.NotFoundErr, child.removeAttributeNode, - node) + child2 = dom2.appendChild(dom2.createElement("foo")) + node2 = child2.getAttributeNode("spam") + self.assertRaises(xml.dom.NotFoundErr, child2.removeAttributeNode, + node2) dom.unlink() def testHasAttribute(self): @@ -607,7 +608,7 @@ def testHasChildNodes(self): dom = parseString("") doc = dom.documentElement - self.assertTrue(dom.hasChildNodes()) + self.assertTrue(doc.hasChildNodes()) dom2 = parseString("") doc2 = dom2.documentElement self.assertFalse(doc2.hasChildNodes()) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:37:57 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:37:57 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NDUzOiBmaXgg?= =?utf-8?q?unused_variables_in_test=5Fxmlrpc=2E__Patch_by_Vajrasky_Kok=2E?= Message-ID: <3cC6sK1LLjzQHY@mail.python.org> http://hg.python.org/cpython/rev/28c756093a63 changeset: 85090:28c756093a63 branch: 3.3 parent: 85088:4daa18b5ad49 user: Ezio Melotti date: Sat Aug 10 18:37:05 2013 +0300 summary: #18453: fix unused variables in test_xmlrpc. Patch by Vajrasky Kok. files: Lib/test/test_xmlrpc.py | 12 ++++++++++-- 1 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py --- a/Lib/test/test_xmlrpc.py +++ b/Lib/test/test_xmlrpc.py @@ -3,6 +3,7 @@ import sys import time import unittest +from unittest import mock import xmlrpc.client as xmlrpclib import xmlrpc.server import http.client @@ -249,7 +250,14 @@ class DateTimeTestCase(unittest.TestCase): def test_default(self): - t = xmlrpclib.DateTime() + with mock.patch('time.localtime') as localtime_mock: + time_struct = time.struct_time( + [2013, 7, 15, 0, 24, 49, 0, 196, 0]) + localtime_mock.return_value = time_struct + localtime = time.localtime() + t = xmlrpclib.DateTime() + self.assertEqual(str(t), + time.strftime("%Y%m%dT%H:%M:%S", localtime)) def test_time(self): d = 1181399930.036952 @@ -286,7 +294,7 @@ self.assertEqual(t1, tref) t2 = xmlrpclib._datetime(d) - self.assertEqual(t1, tref) + self.assertEqual(t2, tref) def test_comparison(self): now = datetime.datetime.now() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:37:58 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:37:58 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NDUzOiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cC6sL3MTJzQbC@mail.python.org> http://hg.python.org/cpython/rev/9f7581816890 changeset: 85091:9f7581816890 parent: 85089:47770b408321 parent: 85090:28c756093a63 user: Ezio Melotti date: Sat Aug 10 18:37:36 2013 +0300 summary: #18453: merge with 3.3. 
files: Lib/test/test_xmlrpc.py | 12 ++++++++++-- 1 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py --- a/Lib/test/test_xmlrpc.py +++ b/Lib/test/test_xmlrpc.py @@ -3,6 +3,7 @@ import sys import time import unittest +from unittest import mock import xmlrpc.client as xmlrpclib import xmlrpc.server import http.client @@ -253,7 +254,14 @@ class DateTimeTestCase(unittest.TestCase): def test_default(self): - t = xmlrpclib.DateTime() + with mock.patch('time.localtime') as localtime_mock: + time_struct = time.struct_time( + [2013, 7, 15, 0, 24, 49, 0, 196, 0]) + localtime_mock.return_value = time_struct + localtime = time.localtime() + t = xmlrpclib.DateTime() + self.assertEqual(str(t), + time.strftime("%Y%m%dT%H:%M:%S", localtime)) def test_time(self): d = 1181399930.036952 @@ -290,7 +298,7 @@ self.assertEqual(t1, tref) t2 = xmlrpclib._datetime(d) - self.assertEqual(t1, tref) + self.assertEqual(t2, tref) def test_comparison(self): now = datetime.datetime.now() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:48:32 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:48:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Fix_a_couple_o?= =?utf-8?q?f_typos=2E?= Message-ID: <3cC75X4BJPzQbC@mail.python.org> http://hg.python.org/cpython/rev/168f6ac90abf changeset: 85092:168f6ac90abf branch: 3.3 parent: 85090:28c756093a63 user: Ezio Melotti date: Sat Aug 10 18:47:07 2013 +0300 summary: Fix a couple of typos. files: Lib/email/architecture.rst | 6 +++--- Lib/email/generator.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/email/architecture.rst b/Lib/email/architecture.rst --- a/Lib/email/architecture.rst +++ b/Lib/email/architecture.rst @@ -24,9 +24,9 @@ Conceptually the package is organized around the model. The model provides both "external" APIs intended for use by application programs using the library, and "internal" APIs intended for use by the Parser and Generator components. -This division is intentionally a bit fuzy; the API described by this documentation -is all a public, stable API. This allows for an application with special needs -to implement its own parser and/or generator. +This division is intentionally a bit fuzzy; the API described by this +documentation is all a public, stable API. This allows for an application +with special needs to implement its own parser and/or generator. In addition to the three major functional components, there is a third key component to the architecture: diff --git a/Lib/email/generator.py b/Lib/email/generator.py --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -349,7 +349,7 @@ # This used to be a module level function; we use a classmethod for this # and _compile_re so we can continue to provide the module level function # for backward compatibility by doing - # _make_boudary = Generator._make_boundary + # _make_boundary = Generator._make_boundary # at the end of the module. It *is* internal, so we could drop that... 
@classmethod def _make_boundary(cls, text=None): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:48:33 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:48:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_typo_fixes_from_3=2E3=2E?= Message-ID: <3cC75Y66yWzQbC@mail.python.org> http://hg.python.org/cpython/rev/40ef5ce25d08 changeset: 85093:40ef5ce25d08 parent: 85091:9f7581816890 parent: 85092:168f6ac90abf user: Ezio Melotti date: Sat Aug 10 18:47:37 2013 +0300 summary: Merge typo fixes from 3.3. files: Lib/email/architecture.rst | 6 +++--- Lib/email/generator.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/email/architecture.rst b/Lib/email/architecture.rst --- a/Lib/email/architecture.rst +++ b/Lib/email/architecture.rst @@ -24,9 +24,9 @@ Conceptually the package is organized around the model. The model provides both "external" APIs intended for use by application programs using the library, and "internal" APIs intended for use by the Parser and Generator components. -This division is intentionally a bit fuzy; the API described by this documentation -is all a public, stable API. This allows for an application with special needs -to implement its own parser and/or generator. +This division is intentionally a bit fuzzy; the API described by this +documentation is all a public, stable API. This allows for an application +with special needs to implement its own parser and/or generator. In addition to the three major functional components, there is a third key component to the architecture: diff --git a/Lib/email/generator.py b/Lib/email/generator.py --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -349,7 +349,7 @@ # This used to be a module level function; we use a classmethod for this # and _compile_re so we can continue to provide the module level function # for backward compatibility by doing - # _make_boudary = Generator._make_boundary + # _make_boundary = Generator._make_boundary # at the end of the module. It *is* internal, so we could drop that... @classmethod def _make_boundary(cls, text=None): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:58:08 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:58:08 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NTA1OiBmaXgg?= =?utf-8?q?duplicate_name_and_remove_duplicate_test=2E__Patch_by_Vajrasky_?= =?utf-8?q?Kok=2E?= Message-ID: <3cC7Jc3Yw8zQyZ@mail.python.org> http://hg.python.org/cpython/rev/53d54503fc06 changeset: 85094:53d54503fc06 branch: 3.3 parent: 85092:168f6ac90abf user: Ezio Melotti date: Sat Aug 10 18:57:12 2013 +0300 summary: #18505: fix duplicate name and remove duplicate test. Patch by Vajrasky Kok. files: Lib/test/test_email/test_email.py | 5 +---- 1 files changed, 1 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -926,7 +926,7 @@ This is a long line that has two whitespaces in a row. 
This used to cause truncation of the header when folded""") - def test_splitter_split_on_punctuation_only_if_fws(self): + def test_splitter_split_on_punctuation_only_if_fws_with_header(self): eq = self.ndiffAssertEqual h = Header('thisverylongheaderhas;semicolons;and,commas,but' 'they;arenotlegal;fold,points') @@ -4214,9 +4214,6 @@ self._test_encode('x' * 200 + '\n', 2 * ('x' * 75 + '=\n') + 'x' * 50 + '\n') - def test_encode_one_long_line(self): - self._test_encode('x' * 100 + '\n', 'x' * 75 + '=\n' + 'x' * 25 + '\n') - def test_encode_shortest_maxlinelen(self): self._test_encode('=' * 5, '=3D=\n' * 4 + '=3D', maxlinelen=4) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 17:58:09 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 17:58:09 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NTA1OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cC7Jd5TpMzRCZ@mail.python.org> http://hg.python.org/cpython/rev/cb0fba5c7828 changeset: 85095:cb0fba5c7828 parent: 85093:40ef5ce25d08 parent: 85094:53d54503fc06 user: Ezio Melotti date: Sat Aug 10 18:57:52 2013 +0300 summary: #18505: merge with 3.3. files: Lib/test/test_email/test_email.py | 5 +---- 1 files changed, 1 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -953,7 +953,7 @@ This is a long line that has two whitespaces in a row. This used to cause truncation of the header when folded""") - def test_splitter_split_on_punctuation_only_if_fws(self): + def test_splitter_split_on_punctuation_only_if_fws_with_header(self): eq = self.ndiffAssertEqual h = Header('thisverylongheaderhas;semicolons;and,commas,but' 'they;arenotlegal;fold,points') @@ -4241,9 +4241,6 @@ self._test_encode('x' * 200 + '\n', 2 * ('x' * 75 + '=\n') + 'x' * 50 + '\n') - def test_encode_one_long_line(self): - self._test_encode('x' * 100 + '\n', 'x' * 75 + '=\n' + 'x' * 25 + '\n') - def test_encode_shortest_maxlinelen(self): self._test_encode('=' * 5, '=3D=\n' * 4 + '=3D', maxlinelen=4) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 18:15:51 2013 From: python-checkins at python.org (r.david.murray) Date: Sat, 10 Aug 2013 18:15:51 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzgxMTI6IFVwZGF0?= =?utf-8?q?e_the_documenting_xmlrpc_server_to_use_getfullargspec=2E?= Message-ID: <3cC7j320bpzNRD@mail.python.org> http://hg.python.org/cpython/rev/bc49e82ee013 changeset: 85096:bc49e82ee013 branch: 3.3 parent: 85090:28c756093a63 user: R David Murray date: Sat Aug 10 12:01:47 2013 -0400 summary: #8112: Update the documenting xmlrpc server to use getfullargspec. Before this patch it would raise an error when trying to display documentation for a method that used annotations. Patch by Claudiu Popa. files: Lib/test/test_docxmlrpc.py | 27 ++++++++++++++++++++++--- Lib/xmlrpc/server.py | 17 +++++++++------ Misc/NEWS | 3 ++ 3 files changed, 36 insertions(+), 11 deletions(-) diff --git a/Lib/test/test_docxmlrpc.py b/Lib/test/test_docxmlrpc.py --- a/Lib/test/test_docxmlrpc.py +++ b/Lib/test/test_docxmlrpc.py @@ -54,8 +54,18 @@ """ return x + y + def annotation(x: int): + """ Use function annotations. 
""" + return x + + class ClassWithAnnotation: + def method_annotation(self, x: bytes): + return x.decode() + serv.register_function(add) serv.register_function(lambda x, y: x-y) + serv.register_function(annotation) + serv.register_instance(ClassWithAnnotation()) while numrequests > 0: serv.handle_request() @@ -177,10 +187,7 @@ b'method takes two integers as arguments' b'
\nand returns a double result.
\n ' b'
\nThis server does NOT support system' - b'.methodSignature.\n
' - b'test_method(arg)
Test ' - b'method\'s docs. This method truly does' - b' very little.
'), response) + b'.methodSignature.'), response) def test_autolink_dotted_methods(self): """Test that selfdot values are made strong automatically in the @@ -191,6 +198,18 @@ self.assertIn(b"""Try self.add, too.""", response.read()) + def test_annotations(self): + """ Test that annotations works as expected """ + self.client.request("GET", "/") + response = self.client.getresponse() + self.assertIn( + (b'
annotation' + b'(x: int)
Use function annotations.' + b'
\n
' + b'method_annotation(x: bytes)
'), + response.read()) + + def test_main(): support.run_unittest(DocXMLRPCHTTPGETServer) diff --git a/Lib/xmlrpc/server.py b/Lib/xmlrpc/server.py --- a/Lib/xmlrpc/server.py +++ b/Lib/xmlrpc/server.py @@ -756,20 +756,23 @@ self.escape(anchor), self.escape(name)) if inspect.ismethod(object): - args, varargs, varkw, defaults = inspect.getargspec(object) + args = inspect.getfullargspec(object) # exclude the argument bound to the instance, it will be # confusing to the non-Python user argspec = inspect.formatargspec ( - args[1:], - varargs, - varkw, - defaults, + args.args[1:], + args.varargs, + args.varkw, + args.defaults, + annotations=args.annotations, formatvalue=self.formatvalue ) elif inspect.isfunction(object): - args, varargs, varkw, defaults = inspect.getargspec(object) + args = inspect.getfullargspec(object) argspec = inspect.formatargspec( - args, varargs, varkw, defaults, formatvalue=self.formatvalue) + args.args, args.varargs, args.varkw, args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue) else: argspec = '(...)' diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -64,6 +64,9 @@ Library ------- +- Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error + if methods have annotations; it now correctly displays the annotations. + - Issue #17998: Fix an internal error in regular expression engine. - Issue #17557: Fix os.getgroups() to work with the modified behavior of -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 18:15:52 2013 From: python-checkins at python.org (r.david.murray) Date: Sat, 10 Aug 2013 18:15:52 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_=238112=3A_Update_the_documenting_xmlrpc_server_to?= =?utf-8?q?_use_getfullargspec=2E?= Message-ID: <3cC7j457Y2zR6S@mail.python.org> http://hg.python.org/cpython/rev/69e515209fa9 changeset: 85097:69e515209fa9 parent: 85091:9f7581816890 parent: 85096:bc49e82ee013 user: R David Murray date: Sat Aug 10 12:03:54 2013 -0400 summary: Merge #8112: Update the documenting xmlrpc server to use getfullargspec. files: Lib/test/test_docxmlrpc.py | 27 ++++++++++++++++++++++--- Lib/xmlrpc/server.py | 17 +++++++++------ Misc/NEWS | 3 ++ 3 files changed, 36 insertions(+), 11 deletions(-) diff --git a/Lib/test/test_docxmlrpc.py b/Lib/test/test_docxmlrpc.py --- a/Lib/test/test_docxmlrpc.py +++ b/Lib/test/test_docxmlrpc.py @@ -54,8 +54,18 @@ """ return x + y + def annotation(x: int): + """ Use function annotations. """ + return x + + class ClassWithAnnotation: + def method_annotation(self, x: bytes): + return x.decode() + serv.register_function(add) serv.register_function(lambda x, y: x-y) + serv.register_function(annotation) + serv.register_instance(ClassWithAnnotation()) while numrequests > 0: serv.handle_request() @@ -177,10 +187,7 @@ b'method takes two integers as arguments' b'
\nand returns a double result.
\n ' b'
\nThis server does NOT support system' - b'.methodSignature.\n
' - b'test_method(arg)
Test ' - b'method\'s docs. This method truly does' - b' very little.
'), response) + b'.methodSignature.'), response) def test_autolink_dotted_methods(self): """Test that selfdot values are made strong automatically in the @@ -191,6 +198,18 @@ self.assertIn(b"""Try self.add, too.""", response.read()) + def test_annotations(self): + """ Test that annotations works as expected """ + self.client.request("GET", "/") + response = self.client.getresponse() + self.assertIn( + (b'
annotation' + b'(x: int)
Use function annotations.' + b'
\n
' + b'method_annotation(x: bytes)
'), + response.read()) + + def test_main(): support.run_unittest(DocXMLRPCHTTPGETServer) diff --git a/Lib/xmlrpc/server.py b/Lib/xmlrpc/server.py --- a/Lib/xmlrpc/server.py +++ b/Lib/xmlrpc/server.py @@ -756,20 +756,23 @@ self.escape(anchor), self.escape(name)) if inspect.ismethod(object): - args, varargs, varkw, defaults = inspect.getargspec(object) + args = inspect.getfullargspec(object) # exclude the argument bound to the instance, it will be # confusing to the non-Python user argspec = inspect.formatargspec ( - args[1:], - varargs, - varkw, - defaults, + args.args[1:], + args.varargs, + args.varkw, + args.defaults, + annotations=args.annotations, formatvalue=self.formatvalue ) elif inspect.isfunction(object): - args, varargs, varkw, defaults = inspect.getargspec(object) + args = inspect.getfullargspec(object) argspec = inspect.formatargspec( - args, varargs, varkw, defaults, formatvalue=self.formatvalue) + args.args, args.varargs, args.varkw, args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue) else: argspec = '(...)' diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,6 +24,9 @@ Library ------- +- Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error + if methods have annotations; it now correctly displays the annotations. + - Issue #18600: Added policy argument to email.message.Message.as_string, and as_bytes and __bytes__ methods to Message. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 18:15:54 2013 From: python-checkins at python.org (r.david.murray) Date: Sat, 10 Aug 2013 18:15:54 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMy4zIC0+IDMuMyk6?= =?utf-8?q?_Merge_heads=2E?= Message-ID: <3cC7j615YSzRCZ@mail.python.org> http://hg.python.org/cpython/rev/54998d3d5587 changeset: 85098:54998d3d5587 branch: 3.3 parent: 85094:53d54503fc06 parent: 85096:bc49e82ee013 user: R David Murray date: Sat Aug 10 12:08:41 2013 -0400 summary: Merge heads. files: Lib/test/test_docxmlrpc.py | 27 ++++++++++++++++++++++--- Lib/xmlrpc/server.py | 17 +++++++++------ Misc/NEWS | 3 ++ 3 files changed, 36 insertions(+), 11 deletions(-) diff --git a/Lib/test/test_docxmlrpc.py b/Lib/test/test_docxmlrpc.py --- a/Lib/test/test_docxmlrpc.py +++ b/Lib/test/test_docxmlrpc.py @@ -54,8 +54,18 @@ """ return x + y + def annotation(x: int): + """ Use function annotations. """ + return x + + class ClassWithAnnotation: + def method_annotation(self, x: bytes): + return x.decode() + serv.register_function(add) serv.register_function(lambda x, y: x-y) + serv.register_function(annotation) + serv.register_instance(ClassWithAnnotation()) while numrequests > 0: serv.handle_request() @@ -177,10 +187,7 @@ b'method takes two integers as arguments' b'
\nand returns a double result.
\n ' b'
\nThis server does NOT support system' - b'.methodSignature.\n
' - b'test_method(arg)
Test ' - b'method\'s docs. This method truly does' - b' very little.
'), response) + b'.methodSignature.'), response) def test_autolink_dotted_methods(self): """Test that selfdot values are made strong automatically in the @@ -191,6 +198,18 @@ self.assertIn(b"""Try self.add, too.""", response.read()) + def test_annotations(self): + """ Test that annotations works as expected """ + self.client.request("GET", "/") + response = self.client.getresponse() + self.assertIn( + (b'
annotation' + b'(x: int)
Use function annotations.' + b'
\n
' + b'method_annotation(x: bytes)
'), + response.read()) + + def test_main(): support.run_unittest(DocXMLRPCHTTPGETServer) diff --git a/Lib/xmlrpc/server.py b/Lib/xmlrpc/server.py --- a/Lib/xmlrpc/server.py +++ b/Lib/xmlrpc/server.py @@ -756,20 +756,23 @@ self.escape(anchor), self.escape(name)) if inspect.ismethod(object): - args, varargs, varkw, defaults = inspect.getargspec(object) + args = inspect.getfullargspec(object) # exclude the argument bound to the instance, it will be # confusing to the non-Python user argspec = inspect.formatargspec ( - args[1:], - varargs, - varkw, - defaults, + args.args[1:], + args.varargs, + args.varkw, + args.defaults, + annotations=args.annotations, formatvalue=self.formatvalue ) elif inspect.isfunction(object): - args, varargs, varkw, defaults = inspect.getargspec(object) + args = inspect.getfullargspec(object) argspec = inspect.formatargspec( - args, varargs, varkw, defaults, formatvalue=self.formatvalue) + args.args, args.varargs, args.varkw, args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue) else: argspec = '(...)' diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -64,6 +64,9 @@ Library ------- +- Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error + if methods have annotations; it now correctly displays the annotations. + - Issue #17998: Fix an internal error in regular expression engine. - Issue #17557: Fix os.getgroups() to work with the modified behavior of -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 18:15:55 2013 From: python-checkins at python.org (r.david.murray) Date: Sat, 10 Aug 2013 18:15:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?b?KTogTWVyZ2UgaGVhZHMu?= Message-ID: <3cC7j747g6zRMx@mail.python.org> http://hg.python.org/cpython/rev/0077bd815c73 changeset: 85099:0077bd815c73 parent: 85095:cb0fba5c7828 parent: 85097:69e515209fa9 user: R David Murray date: Sat Aug 10 12:13:44 2013 -0400 summary: Merge heads. files: Lib/test/test_docxmlrpc.py | 27 ++++++++++++++++++++++--- Lib/xmlrpc/server.py | 17 +++++++++------ Misc/NEWS | 3 ++ 3 files changed, 36 insertions(+), 11 deletions(-) diff --git a/Lib/test/test_docxmlrpc.py b/Lib/test/test_docxmlrpc.py --- a/Lib/test/test_docxmlrpc.py +++ b/Lib/test/test_docxmlrpc.py @@ -54,8 +54,18 @@ """ return x + y + def annotation(x: int): + """ Use function annotations. """ + return x + + class ClassWithAnnotation: + def method_annotation(self, x: bytes): + return x.decode() + serv.register_function(add) serv.register_function(lambda x, y: x-y) + serv.register_function(annotation) + serv.register_instance(ClassWithAnnotation()) while numrequests > 0: serv.handle_request() @@ -177,10 +187,7 @@ b'method takes two integers as arguments' b'
\nand returns a double result.
\n ' b'
\nThis server does NOT support system' - b'.methodSignature.\n
' - b'test_method(arg)
Test ' - b'method\'s docs. This method truly does' - b' very little.
'), response) + b'.methodSignature.'), response) def test_autolink_dotted_methods(self): """Test that selfdot values are made strong automatically in the @@ -191,6 +198,18 @@ self.assertIn(b"""Try self.add, too.""", response.read()) + def test_annotations(self): + """ Test that annotations works as expected """ + self.client.request("GET", "/") + response = self.client.getresponse() + self.assertIn( + (b'
annotation' + b'(x: int)
Use function annotations.' + b'
\n
' + b'method_annotation(x: bytes)
'), + response.read()) + + def test_main(): support.run_unittest(DocXMLRPCHTTPGETServer) diff --git a/Lib/xmlrpc/server.py b/Lib/xmlrpc/server.py --- a/Lib/xmlrpc/server.py +++ b/Lib/xmlrpc/server.py @@ -756,20 +756,23 @@ self.escape(anchor), self.escape(name)) if inspect.ismethod(object): - args, varargs, varkw, defaults = inspect.getargspec(object) + args = inspect.getfullargspec(object) # exclude the argument bound to the instance, it will be # confusing to the non-Python user argspec = inspect.formatargspec ( - args[1:], - varargs, - varkw, - defaults, + args.args[1:], + args.varargs, + args.varkw, + args.defaults, + annotations=args.annotations, formatvalue=self.formatvalue ) elif inspect.isfunction(object): - args, varargs, varkw, defaults = inspect.getargspec(object) + args = inspect.getfullargspec(object) argspec = inspect.formatargspec( - args, varargs, varkw, defaults, formatvalue=self.formatvalue) + args.args, args.varargs, args.varkw, args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue) else: argspec = '(...)' diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,6 +24,9 @@ Library ------- +- Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error + if methods have annotations; it now correctly displays the annotations. + - Issue #18600: Added policy argument to email.message.Message.as_string, and as_bytes and __bytes__ methods to Message. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 18:15:56 2013 From: python-checkins at python.org (r.david.murray) Date: Sat, 10 Aug 2013 18:15:56 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Null_merge_of_3=2E3_merge_heads=2E?= Message-ID: <3cC7j85v6HzRMx@mail.python.org> http://hg.python.org/cpython/rev/8215bc8ab617 changeset: 85100:8215bc8ab617 parent: 85099:0077bd815c73 parent: 85098:54998d3d5587 user: R David Murray date: Sat Aug 10 12:14:58 2013 -0400 summary: Null merge of 3.3 merge heads. files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 19:02:44 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 19:02:44 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NjgxOiBGaXgg?= =?utf-8?q?a_NameError_in_imp=2Ereload=28=29_=28noticed_by_Weizhao_Li=29?= =?utf-8?q?=2E?= Message-ID: <3cC8l86ZbDzQCy@mail.python.org> http://hg.python.org/cpython/rev/80b65aa2d579 changeset: 85101:80b65aa2d579 branch: 3.3 parent: 85098:54998d3d5587 user: Ezio Melotti date: Sat Aug 10 19:59:36 2013 +0300 summary: #18681: Fix a NameError in imp.reload() (noticed by Weizhao Li). 
files: Lib/imp.py | 2 +- Lib/test/test_imp.py | 9 +++++++++ Misc/NEWS | 2 ++ 3 files changed, 12 insertions(+), 1 deletions(-) diff --git a/Lib/imp.py b/Lib/imp.py --- a/Lib/imp.py +++ b/Lib/imp.py @@ -267,7 +267,7 @@ parent_name = name.rpartition('.')[0] if parent_name and parent_name not in sys.modules: msg = "parent {!r} not in sys.modules" - raise ImportError(msg.format(parentname), name=parent_name) + raise ImportError(msg.format(parent_name), name=parent_name) return module.__loader__.load_module(name) finally: try: diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -275,6 +275,15 @@ import marshal imp.reload(marshal) + def test_with_deleted_parent(self): + # see #18681 + from html import parser + del sys.modules['html'] + def cleanup(): del sys.modules['html.parser'] + self.addCleanup(cleanup) + with self.assertRaisesRegex(ImportError, 'html'): + imp.reload(parser) + class PEP3147Tests(unittest.TestCase): """Tests of PEP 3147.""" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -64,6 +64,8 @@ Library ------- +- Issue #18681: Fix a NameError in imp.reload() (noticed by Weizhao Li). + - Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error if methods have annotations; it now correctly displays the annotations. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 19:02:46 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 10 Aug 2013 19:02:46 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NjgxOiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cC8lB1cMqzQLG@mail.python.org> http://hg.python.org/cpython/rev/6bee7f1061e0 changeset: 85102:6bee7f1061e0 parent: 85100:8215bc8ab617 parent: 85101:80b65aa2d579 user: Ezio Melotti date: Sat Aug 10 20:01:43 2013 +0300 summary: #18681: merge with 3.3. files: Lib/importlib/__init__.py | 2 +- Lib/test/test_imp.py | 9 +++++++++ Misc/NEWS | 2 ++ 3 files changed, 12 insertions(+), 1 deletions(-) diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py --- a/Lib/importlib/__init__.py +++ b/Lib/importlib/__init__.py @@ -115,7 +115,7 @@ parent_name = name.rpartition('.')[0] if parent_name and parent_name not in sys.modules: msg = "parent {!r} not in sys.modules" - raise ImportError(msg.format(parentname), name=parent_name) + raise ImportError(msg.format(parent_name), name=parent_name) return module.__loader__.load_module(name) finally: try: diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -314,6 +314,15 @@ import marshal imp.reload(marshal) + def test_with_deleted_parent(self): + # see #18681 + from html import parser + del sys.modules['html'] + def cleanup(): del sys.modules['html.parser'] + self.addCleanup(cleanup) + with self.assertRaisesRegex(ImportError, 'html'): + imp.reload(parser) + class PEP3147Tests(unittest.TestCase): """Tests of PEP 3147.""" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -221,6 +221,8 @@ Library ------- +- Issue #18681: Fix a NameError in importlib.reload() (noticed by Weizhao Li). + - Issue #14323: Expanded the number of digits in the coefficients for the RGB -- YIQ conversions so that they match the FCC NTSC versions. 
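(A minimal sketch, not part of the changeset, of the code path being
fixed: reloading a submodule whose parent package has been dropped from
sys.modules should raise ImportError, but the misspelled 'parentname'
turned that into a NameError. The html/html.parser pair is the one the
new test uses; on default the same fix lands in importlib.reload().)

    import imp
    import sys
    from html import parser

    del sys.modules['html']
    try:
        imp.reload(parser)
    except ImportError as exc:            # was a NameError before this fix
        print(exc)                        # "parent 'html' not in sys.modules"
    finally:
        sys.modules.pop('html.parser', None)   # same cleanup as the test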
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 22:01:55 2013 From: python-checkins at python.org (ethan.furman) Date: Sat, 10 Aug 2013 22:01:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Close_=2318264=3A_int-_and?= =?utf-8?q?_float-derived_enums_now_converted_to_int_or_float=2E?= Message-ID: <3cCDjv022nzNWy@mail.python.org> http://hg.python.org/cpython/rev/ae1a7c420f08 changeset: 85103:ae1a7c420f08 user: Ethan Furman date: Sat Aug 10 13:01:45 2013 -0700 summary: Close #18264: int- and float-derived enums now converted to int or float. files: Doc/library/json.rst | 37 +++++---- Lib/json/encoder.py | 27 ++++-- Lib/test/test_json/test_enum.py | 81 +++++++++++++++++++++ Modules/_json.c | 65 +++++++++++++++- 4 files changed, 178 insertions(+), 32 deletions(-) diff --git a/Doc/library/json.rst b/Doc/library/json.rst --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -349,23 +349,26 @@ .. _py-to-json-table: - +-------------------+---------------+ - | Python | JSON | - +===================+===============+ - | dict | object | - +-------------------+---------------+ - | list, tuple | array | - +-------------------+---------------+ - | str | string | - +-------------------+---------------+ - | int, float | number | - +-------------------+---------------+ - | True | true | - +-------------------+---------------+ - | False | false | - +-------------------+---------------+ - | None | null | - +-------------------+---------------+ + +----------------------------------------+---------------+ + | Python | JSON | + +========================================+===============+ + | dict | object | + +----------------------------------------+---------------+ + | list, tuple | array | + +----------------------------------------+---------------+ + | str | string | + +----------------------------------------+---------------+ + | int, float, int- & float-derived Enums | number | + +----------------------------------------+---------------+ + | True | true | + +----------------------------------------+---------------+ + | False | false | + +----------------------------------------+---------------+ + | None | null | + +----------------------------------------+---------------+ + + .. versionchanged:: 3.4 + Added support for int- and float-derived Enum classes. To extend this to recognize other objects, subclass and implement a :meth:`default` method with another method that returns a serializable object diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py --- a/Lib/json/encoder.py +++ b/Lib/json/encoder.py @@ -175,6 +175,7 @@ def encode(self, o): """Return a JSON string representation of a Python data structure. + >>> from json.encoder import JSONEncoder >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) '{"foo": ["bar", "baz"]}' @@ -298,9 +299,13 @@ elif value is False: yield buf + 'false' elif isinstance(value, int): - yield buf + str(value) + # Subclasses of int/float may override __str__, but we still + # want to encode them as integers/floats in JSON. One example + # within the standard library is IntEnum. + yield buf + str(int(value)) elif isinstance(value, float): - yield buf + _floatstr(value) + # see comment above for int + yield buf + _floatstr(float(value)) else: yield buf if isinstance(value, (list, tuple)): @@ -346,7 +351,8 @@ # JavaScript is weakly typed for these, so it makes sense to # also allow them. Many encoders seem to do something like this. 
elif isinstance(key, float): - key = _floatstr(key) + # see comment for int/float in _make_iterencode + key = _floatstr(float(key)) elif key is True: key = 'true' elif key is False: @@ -354,7 +360,8 @@ elif key is None: key = 'null' elif isinstance(key, int): - key = str(key) + # see comment for int/float in _make_iterencode + key = str(int(key)) elif _skipkeys: continue else: @@ -374,9 +381,11 @@ elif value is False: yield 'false' elif isinstance(value, int): - yield str(value) + # see comment for int/float in _make_iterencode + yield str(int(value)) elif isinstance(value, float): - yield _floatstr(value) + # see comment for int/float in _make_iterencode + yield _floatstr(float(value)) else: if isinstance(value, (list, tuple)): chunks = _iterencode_list(value, _current_indent_level) @@ -402,9 +411,11 @@ elif o is False: yield 'false' elif isinstance(o, int): - yield str(o) + # see comment for int/float in _make_iterencode + yield str(int(o)) elif isinstance(o, float): - yield _floatstr(o) + # see comment for int/float in _make_iterencode + yield _floatstr(float(o)) elif isinstance(o, (list, tuple)): yield from _iterencode_list(o, _current_indent_level) elif isinstance(o, dict): diff --git a/Lib/test/test_json/test_enum.py b/Lib/test/test_json/test_enum.py new file mode 100644 --- /dev/null +++ b/Lib/test/test_json/test_enum.py @@ -0,0 +1,81 @@ +from enum import Enum, IntEnum +from test.test_json import PyTest, CTest + +SMALL = 1 +BIG = 1<<32 +HUGE = 1<<64 +REALLY_HUGE = 1<<96 + +class BigNum(IntEnum): + small = SMALL + big = BIG + huge = HUGE + really_huge = REALLY_HUGE + +E = 2.718281 +PI = 3.141593 +TAU = 2 * PI + +class FloatNum(float, Enum): + e = E + pi = PI + tau = TAU + +class TestEnum: + + def test_floats(self): + for enum in FloatNum: + self.assertEqual(self.dumps(enum), repr(enum.value)) + self.assertEqual(float(self.dumps(enum)), enum) + self.assertEqual(self.loads(self.dumps(enum)), enum) + + def test_ints(self): + for enum in BigNum: + self.assertEqual(self.dumps(enum), str(enum.value)) + self.assertEqual(int(self.dumps(enum)), enum) + self.assertEqual(self.loads(self.dumps(enum)), enum) + + def test_list(self): + self.assertEqual( + self.dumps(list(BigNum)), + str([SMALL, BIG, HUGE, REALLY_HUGE]), + ) + self.assertEqual(self.dumps(list(FloatNum)), str([E, PI, TAU])) + + def test_dict_keys(self): + s, b, h, r = BigNum + e, p, t = FloatNum + d = { + s:'tiny', b:'large', h:'larger', r:'largest', + e:"Euler's number", p:'pi', t:'tau', + } + nd = self.loads(self.dumps(d)) + self.assertEqual(nd[str(SMALL)], 'tiny') + self.assertEqual(nd[str(BIG)], 'large') + self.assertEqual(nd[str(HUGE)], 'larger') + self.assertEqual(nd[str(REALLY_HUGE)], 'largest') + self.assertEqual(nd[repr(E)], "Euler's number") + self.assertEqual(nd[repr(PI)], 'pi') + self.assertEqual(nd[repr(TAU)], 'tau') + + def test_dict_values(self): + d = dict( + tiny=BigNum.small, + large=BigNum.big, + larger=BigNum.huge, + largest=BigNum.really_huge, + e=FloatNum.e, + pi=FloatNum.pi, + tau=FloatNum.tau, + ) + nd = self.loads(self.dumps(d)) + self.assertEqual(nd['tiny'], SMALL) + self.assertEqual(nd['large'], BIG) + self.assertEqual(nd['larger'], HUGE) + self.assertEqual(nd['largest'], REALLY_HUGE) + self.assertEqual(nd['e'], E) + self.assertEqual(nd['pi'], PI) + self.assertEqual(nd['tau'], TAU) + +class TestPyEnum(TestEnum, PyTest): pass +class TestCEnum(TestEnum, CTest): pass diff --git a/Modules/_json.c b/Modules/_json.c --- a/Modules/_json.c +++ b/Modules/_json.c @@ -116,6 +116,8 @@ static PyObject * 
encoder_encode_string(PyEncoderObject *s, PyObject *obj); static PyObject * +encoder_encode_long(PyEncoderObject* s UNUSED, PyObject *obj); +static PyObject * encoder_encode_float(PyEncoderObject *s, PyObject *obj); #define S_CHAR(c) (c >= ' ' && c <= '~' && c != '\\' && c != '"') @@ -1302,13 +1304,45 @@ } static PyObject * +encoder_encode_long(PyEncoderObject* s UNUSED, PyObject *obj) +{ + /* Return the JSON representation of a PyLong and PyLong subclasses. + Calls int() on PyLong subclasses in case the str() was changed. + Added specifically to deal with IntEnum. See Issue18264. */ + PyObject *encoded, *longobj; + if (PyLong_CheckExact(obj)) { + encoded = PyObject_Str(obj); + } + else { + longobj = PyNumber_Long(obj); + if (longobj == NULL) { + PyErr_SetString( + PyExc_ValueError, + "Unable to coerce int subclass to int" + ); + return NULL; + } + encoded = PyObject_Str(longobj); + Py_DECREF(longobj); + } + return encoded; +} + + +static PyObject * encoder_encode_float(PyEncoderObject *s, PyObject *obj) { - /* Return the JSON representation of a PyFloat */ + /* Return the JSON representation of a PyFloat. + Modified to call float() on float subclasses in case the subclass + changes the repr. See Issue18264. */ + PyObject *encoded, *floatobj; double i = PyFloat_AS_DOUBLE(obj); if (!Py_IS_FINITE(i)) { if (!s->allow_nan) { - PyErr_SetString(PyExc_ValueError, "Out of range float values are not JSON compliant"); + PyErr_SetString( + PyExc_ValueError, + "Out of range float values are not JSON compliant" + ); return NULL; } if (i > 0) { @@ -1321,8 +1355,24 @@ return PyUnicode_FromString("NaN"); } } - /* Use a better float format here? */ - return PyObject_Repr(obj); + /* coerce float subclasses to float (primarily for Enum) */ + if (PyFloat_CheckExact(obj)) { + /* Use a better float format here? */ + encoded = PyObject_Repr(obj); + } + else { + floatobj = PyNumber_Float(obj); + if (floatobj == NULL) { + PyErr_SetString( + PyExc_ValueError, + "Unable to coerce float subclass to float" + ); + return NULL; + } + encoded = PyObject_Repr(floatobj); + Py_DECREF(floatobj); + } + return encoded; } static PyObject * @@ -1366,7 +1416,7 @@ return _steal_accumulate(acc, encoded); } else if (PyLong_Check(obj)) { - PyObject *encoded = PyObject_Str(obj); + PyObject *encoded = encoder_encode_long(s, obj); if (encoded == NULL) return -1; return _steal_accumulate(acc, encoded); @@ -1551,9 +1601,10 @@ goto bail; } else if (PyLong_Check(key)) { - kstr = PyObject_Str(key); - if (kstr == NULL) + kstr = encoder_encode_long(s, key); + if (kstr == NULL) { goto bail; + } } else if (skipkeys) { Py_DECREF(item); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 22:07:58 2013 From: python-checkins at python.org (ethan.furman) Date: Sat, 10 Aug 2013 22:07:58 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbjogQ2xvc2UgIzE4NjkzOiBfX2Rp?= =?utf-8?q?r=5F=5F_removed_from_Enum=3B_help=28=29_now_helpful=2E?= Message-ID: <3cCDrt4yvZzPN5@mail.python.org> http://hg.python.org/cpython/rev/5d417257748e changeset: 85104:5d417257748e user: Ethan Furman date: Sat Aug 10 13:07:49 2013 -0700 summary: Close #18693: __dir__ removed from Enum; help() now helpful. 
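(A rough illustration, not part of the changeset, of what the removed
methods did, based on the deleted tests: they hard-coded the result of
dir() for enum classes and members, which in turn starved help() of
information. The Season members below are the ones the deleted tests
used.)

    from enum import Enum

    class Season(Enum):
        SPRING = 1
        SUMMER = 2
        AUTUMN = 3
        WINTER = 4

    # Before this changeset the custom __dir__ methods meant:
    #   set(dir(Season))        == {'__class__', '__doc__', '__members__',
    #                               'SPRING', 'SUMMER', 'AUTUMN', 'WINTER'}
    #   set(dir(Season.WINTER)) == {'__class__', '__doc__', 'name', 'value'}
    # With them gone, the default dir() machinery is used and help() can
    # document the class normally.
    help(Season)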
files: Lib/enum.py | 6 ------ Lib/test/test_enum.py | 15 --------------- 2 files changed, 0 insertions(+), 21 deletions(-) diff --git a/Lib/enum.py b/Lib/enum.py --- a/Lib/enum.py +++ b/Lib/enum.py @@ -223,9 +223,6 @@ def __contains__(cls, member): return isinstance(member, cls) and member.name in cls._member_map_ - def __dir__(self): - return ['__class__', '__doc__', '__members__'] + self._member_names_ - @property def __members__(cls): """Returns a mapping of member name->value. @@ -433,9 +430,6 @@ def __str__(self): return "%s.%s" % (self.__class__.__name__, self._name_) - def __dir__(self): - return (['__class__', '__doc__', 'name', 'value']) - def __eq__(self, other): if type(other) is self.__class__: return self is other diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -78,21 +78,6 @@ def test_intenum_value(self): self.assertEqual(IntStooges.CURLY.value, 2) - def test_dir_on_class(self): - Season = self.Season - self.assertEqual( - set(dir(Season)), - set(['__class__', '__doc__', '__members__', - 'SPRING', 'SUMMER', 'AUTUMN', 'WINTER']), - ) - - def test_dir_on_item(self): - Season = self.Season - self.assertEqual( - set(dir(Season.WINTER)), - set(['__class__', '__doc__', 'name', 'value']), - ) - def test_enum(self): Season = self.Season lst = list(Season) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 22:57:28 2013 From: python-checkins at python.org (terry.reedy) Date: Sat, 10 Aug 2013 22:57:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4MjI2?= =?utf-8?q?=3A_Add_docstrings_and_unittests_for_idlelib/FormatParagraph=2E?= =?utf-8?b?cHku?= Message-ID: <3cCFy06YZFzNxJ@mail.python.org> http://hg.python.org/cpython/rev/453b4f89a2b4 changeset: 85105:453b4f89a2b4 branch: 2.7 parent: 85072:0152152b09d0 user: Terry Jan Reedy date: Sat Aug 10 16:56:20 2013 -0400 summary: Issue #18226: Add docstrings and unittests for idlelib/FormatParagraph.py. Move comment code to a separate function so it can be separately tested. Original patches by Todd Rovito and Phil Webster. files: Lib/idlelib/FormatParagraph.py | 133 ++- Lib/idlelib/idle_test/test_formatparagraph.py | 374 ++++++++++ Misc/NEWS | 3 + 3 files changed, 464 insertions(+), 46 deletions(-) diff --git a/Lib/idlelib/FormatParagraph.py b/Lib/idlelib/FormatParagraph.py --- a/Lib/idlelib/FormatParagraph.py +++ b/Lib/idlelib/FormatParagraph.py @@ -1,18 +1,19 @@ -# Extension to format a paragraph +"""Extension to format a paragraph or selection to a max width. -# Does basic, standard text formatting, and also understands Python -# comment blocks. Thus, for editing Python source code, this -# extension is really only suitable for reformatting these comment -# blocks or triple-quoted strings. +Does basic, standard text formatting, and also understands Python +comment blocks. Thus, for editing Python source code, this +extension is really only suitable for reformatting these comment +blocks or triple-quoted strings. -# Known problems with comment reformatting: -# * If there is a selection marked, and the first line of the -# selection is not complete, the block will probably not be detected -# as comments, and will have the normal "text formatting" rules -# applied. -# * If a comment block has leading whitespace that mixes tabs and -# spaces, they will not be considered part of the same block. 
-# * Fancy comments, like this bulleted list, arent handled :-) +Known problems with comment reformatting: +* If there is a selection marked, and the first line of the + selection is not complete, the block will probably not be detected + as comments, and will have the normal "text formatting" rules + applied. +* If a comment block has leading whitespace that mixes tabs and + spaces, they will not be considered part of the same block. +* Fancy comments, like this bulleted list, aren't handled :-) +""" import re from idlelib.configHandler import idleConf @@ -32,41 +33,31 @@ self.editwin = None def format_paragraph_event(self, event): - maxformatwidth = int(idleConf.GetOption('main','FormatParagraph', - 'paragraph', type='int')) + """Formats paragraph to a max width specified in idleConf. + + If text is selected, format_paragraph_event will start breaking lines + at the max width, starting from the beginning selection. + + If no text is selected, format_paragraph_event uses the current + cursor location to determine the paragraph (lines of text surrounded + by blank lines) and formats it. + """ + maxformatwidth = idleConf.GetOption( + 'main', 'FormatParagraph', 'paragraph', type='int') text = self.editwin.text first, last = self.editwin.get_selection_indices() if first and last: data = text.get(first, last) - comment_header = '' + comment_header = get_comment_header(data) else: first, last, comment_header, data = \ find_paragraph(text, text.index("insert")) if comment_header: - # Reformat the comment lines - convert to text sans header. - lines = data.split("\n") - lines = map(lambda st, l=len(comment_header): st[l:], lines) - data = "\n".join(lines) - # Reformat to maxformatwidth chars or a 20 char width, whichever is greater. - format_width = max(maxformatwidth - len(comment_header), 20) - newdata = reformat_paragraph(data, format_width) - # re-split and re-insert the comment header. - newdata = newdata.split("\n") - # If the block ends in a \n, we dont want the comment - # prefix inserted after it. (Im not sure it makes sense to - # reformat a comment block that isnt made of complete - # lines, but whatever!) Can't think of a clean solution, - # so we hack away - block_suffix = "" - if not newdata[-1]: - block_suffix = "\n" - newdata = newdata[:-1] - builder = lambda item, prefix=comment_header: prefix+item - newdata = '\n'.join(map(builder, newdata)) + block_suffix + newdata = reformat_comment(data, maxformatwidth, comment_header) else: - # Just a normal text format newdata = reformat_paragraph(data, maxformatwidth) text.tag_remove("sel", "1.0", "end") + if newdata != data: text.mark_set("insert", first) text.undo_block_start() @@ -79,31 +70,44 @@ return "break" def find_paragraph(text, mark): + """Returns the start/stop indices enclosing the paragraph that mark is in. + + Also returns the comment format string, if any, and paragraph of text + between the start/stop indices. 
+ """ lineno, col = map(int, mark.split(".")) - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) + + # Look for start of next paragraph if the index passed in is a blank line while text.compare("%d.0" % lineno, "<", "end") and is_all_white(line): lineno = lineno + 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) first_lineno = lineno comment_header = get_comment_header(line) comment_header_len = len(comment_header) + + # Once start line found, search for end of paragraph (a blank line) while get_comment_header(line)==comment_header and \ not is_all_white(line[comment_header_len:]): lineno = lineno + 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) last = "%d.0" % lineno - # Search back to beginning of paragraph + + # Search back to beginning of paragraph (first blank line before) lineno = first_lineno - 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) while lineno > 0 and \ get_comment_header(line)==comment_header and \ not is_all_white(line[comment_header_len:]): lineno = lineno - 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) first = "%d.0" % (lineno+1) + return first, last, comment_header, text.get(first, last) +# This should perhaps be replaced with textwrap.wrap def reformat_paragraph(data, limit): + """Return data reformatted to specified width (limit).""" lines = data.split("\n") i = 0 n = len(lines) @@ -126,7 +130,7 @@ if not word: continue # Can happen when line ends in whitespace if len((partial + word).expandtabs()) > limit and \ - partial != indent1: + partial != indent1: new.append(partial.rstrip()) partial = indent2 partial = partial + word + " " @@ -138,13 +142,50 @@ new.extend(lines[i:]) return "\n".join(new) +def reformat_comment(data, limit, comment_header): + """Return data reformatted to specified width with comment header.""" + + # Remove header from the comment lines + lc = len(comment_header) + data = "\n".join(line[lc:] for line in data.split("\n")) + # Reformat to maxformatwidth chars or a 20 char width, + # whichever is greater. + format_width = max(limit - len(comment_header), 20) + newdata = reformat_paragraph(data, format_width) + # re-split and re-insert the comment header. + newdata = newdata.split("\n") + # If the block ends in a \n, we dont want the comment prefix + # inserted after it. (Im not sure it makes sense to reformat a + # comment block that is not made of complete lines, but whatever!) + # Can't think of a clean solution, so we hack away + block_suffix = "" + if not newdata[-1]: + block_suffix = "\n" + newdata = newdata[:-1] + return '\n'.join(comment_header+line for line in newdata) + block_suffix + def is_all_white(line): + """Return True if line is empty or all whitespace.""" + return re.match(r"^\s*$", line) is not None def get_indent(line): - return re.match(r"^(\s*)", line).group() + """Return the initial space or tab indent of line.""" + return re.match(r"^([ \t]*)", line).group() def get_comment_header(line): - m = re.match(r"^(\s*#*)", line) + """Return string with leading whitespace and '#' from line or ''. + + A null return indicates that the line is not a comment line. A non- + null return, such as ' #', will be used to find the other lines of + a comment block with the same indent. 
+ """ + m = re.match(r"^([ \t]*#*)", line) if m is None: return "" return m.group(1) + +if __name__ == "__main__": + from test import support; support.use_resources = ['gui'] + import unittest + unittest.main('idlelib.idle_test.test_formatparagraph', + verbosity=2, exit=False) diff --git a/Lib/idlelib/idle_test/test_formatparagraph.py b/Lib/idlelib/idle_test/test_formatparagraph.py new file mode 100644 --- /dev/null +++ b/Lib/idlelib/idle_test/test_formatparagraph.py @@ -0,0 +1,374 @@ +# Test the functions and main class method of FormatParagraph.py +import unittest +from idlelib import FormatParagraph as fp +from idlelib.EditorWindow import EditorWindow +from tkinter import Tk, Text, TclError +from test.support import requires + + +class Is_Get_Test(unittest.TestCase): + """Test the is_ and get_ functions""" + test_comment = '# This is a comment' + test_nocomment = 'This is not a comment' + trailingws_comment = '# This is a comment ' + leadingws_comment = ' # This is a comment' + leadingws_nocomment = ' This is not a comment' + + def test_is_all_white(self): + self.assertTrue(fp.is_all_white('')) + self.assertTrue(fp.is_all_white('\t\n\r\f\v')) + self.assertFalse(fp.is_all_white(self.test_comment)) + + def test_get_indent(self): + Equal = self.assertEqual + Equal(fp.get_indent(self.test_comment), '') + Equal(fp.get_indent(self.trailingws_comment), '') + Equal(fp.get_indent(self.leadingws_comment), ' ') + Equal(fp.get_indent(self.leadingws_nocomment), ' ') + + def test_get_comment_header(self): + Equal = self.assertEqual + # Test comment strings + Equal(fp.get_comment_header(self.test_comment), '#') + Equal(fp.get_comment_header(self.trailingws_comment), '#') + Equal(fp.get_comment_header(self.leadingws_comment), ' #') + # Test non-comment strings + Equal(fp.get_comment_header(self.leadingws_nocomment), ' ') + Equal(fp.get_comment_header(self.test_nocomment), '') + + +class FindTest(unittest.TestCase): + """Test the find_paragraph function in FormatParagraph. + + Using the runcase() function, find_paragraph() is called with 'mark' set at + multiple indexes before and inside the test paragraph. + + It appears that code with the same indentation as a quoted string is grouped + as part of the same paragraph, which is probably incorrect behavior. 
+ """ + + @classmethod + def setUpClass(cls): + from idlelib.idle_test.mock_tk import Text + cls.text = Text() + + def runcase(self, inserttext, stopline, expected): + # Check that find_paragraph returns the expected paragraph when + # the mark index is set to beginning, middle, end of each line + # up to but not including the stop line + text = self.text + text.insert('1.0', inserttext) + for line in range(1, stopline): + linelength = int(text.index("%d.end" % line).split('.')[1]) + for col in (0, linelength//2, linelength): + tempindex = "%d.%d" % (line, col) + self.assertEqual(fp.find_paragraph(text, tempindex), expected) + text.delete('1.0', 'end') + + def test_find_comment(self): + comment = ( + "# Comment block with no blank lines before\n" + "# Comment line\n" + "\n") + self.runcase(comment, 3, ('1.0', '3.0', '#', comment[0:58])) + + comment = ( + "\n" + "# Comment block with whitespace line before and after\n" + "# Comment line\n" + "\n") + self.runcase(comment, 4, ('2.0', '4.0', '#', comment[1:70])) + + comment = ( + "\n" + " # Indented comment block with whitespace before and after\n" + " # Comment line\n" + "\n") + self.runcase(comment, 4, ('2.0', '4.0', ' #', comment[1:82])) + + comment = ( + "\n" + "# Single line comment\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:23])) + + comment = ( + "\n" + " # Single line comment with leading whitespace\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', ' #', comment[1:51])) + + comment = ( + "\n" + "# Comment immediately followed by code\n" + "x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:40])) + + comment = ( + "\n" + " # Indented comment immediately followed by code\n" + "x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', ' #', comment[1:53])) + + comment = ( + "\n" + "# Comment immediately followed by indented code\n" + " x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:49])) + + def test_find_paragraph(self): + teststring = ( + '"""String with no blank lines before\n' + 'String line\n' + '"""\n' + '\n') + self.runcase(teststring, 4, ('1.0', '4.0', '', teststring[0:53])) + + teststring = ( + "\n" + '"""String with whitespace line before and after\n' + 'String line.\n' + '"""\n' + '\n') + self.runcase(teststring, 5, ('2.0', '5.0', '', teststring[1:66])) + + teststring = ( + '\n' + ' """Indented string with whitespace before and after\n' + ' Comment string.\n' + ' """\n' + '\n') + self.runcase(teststring, 5, ('2.0', '5.0', ' ', teststring[1:85])) + + teststring = ( + '\n' + '"""Single line string."""\n' + '\n') + self.runcase(teststring, 3, ('2.0', '3.0', '', teststring[1:27])) + + teststring = ( + '\n' + ' """Single line string with leading whitespace."""\n' + '\n') + self.runcase(teststring, 3, ('2.0', '3.0', ' ', teststring[1:55])) + + +class ReformatFunctionTest(unittest.TestCase): + """Test the reformat_paragraph function without the editor window.""" + + def test_reformat_paragrah(self): + Equal = self.assertEqual + reform = fp.reformat_paragraph + hw = "O hello world" + Equal(reform(' ', 1), ' ') + Equal(reform("Hello world", 20), "Hello world") + + # Test without leading newline + Equal(reform(hw, 1), "O\nhello\nworld") + Equal(reform(hw, 6), "O\nhello\nworld") + Equal(reform(hw, 7), "O hello\nworld") + Equal(reform(hw, 12), "O hello\nworld") + Equal(reform(hw, 13), "O hello world") + + # Test with leading newline + hw = "\nO hello world" + Equal(reform(hw, 1), "\nO\nhello\nworld") + Equal(reform(hw, 6), "\nO\nhello\nworld") + 
Equal(reform(hw, 7), "\nO hello\nworld") + Equal(reform(hw, 12), "\nO hello\nworld") + Equal(reform(hw, 13), "\nO hello world") + + +class ReformatCommentTest(unittest.TestCase): + """Test the reformat_comment function without the editor window.""" + + def test_reformat_comment(self): + Equal = self.assertEqual + + # reformat_comment formats to a minimum of 20 characters + test_string = ( + " \"\"\"this is a test of a reformat for a triple quoted string" + " will it reformat to less than 70 characters for me?\"\"\"") + result = fp.reformat_comment(test_string, 70, " ") + expected = ( + " \"\"\"this is a test of a reformat for a triple quoted string will it\n" + " reformat to less than 70 characters for me?\"\"\"") + Equal(result, expected) + + test_comment = ( + "# this is a test of a reformat for a triple quoted string will " + "it reformat to less than 70 characters for me?") + result = fp.reformat_comment(test_comment, 70, "#") + expected = ( + "# this is a test of a reformat for a triple quoted string will it\n" + "# reformat to less than 70 characters for me?") + Equal(result, expected) + + +class FormatClassTest(unittest.TestCase): + def test_init_close(self): + instance = fp.FormatParagraph('editor') + self.assertEqual(instance.editwin, 'editor') + instance.close() + self.assertEqual(instance.editwin, None) + + +# For testing format_paragraph_event, Initialize FormatParagraph with +# a mock Editor with .text and .get_selection_indices. The text must +# be a Text wrapper that adds two methods + +# A real EditorWindow creates unneeded, time-consuming baggage and +# sometimes emits shutdown warnings like this: +# "warning: callback failed in WindowList +# : invalid command name ".55131368.windows". +# Calling EditorWindow._close in tearDownClass prevents this but causes +# other problems (windows left open). + +class TextWrapper: + def __init__(self, master): + self.text = Text(master=master) + def __getattr__(self, name): + return getattr(self.text, name) + def undo_block_start(self): pass + def undo_block_stop(self): pass + +class Editor: + def __init__(self, root): + self.text = TextWrapper(root) + get_selection_indices = EditorWindow. get_selection_indices + +class FormatEventTest(unittest.TestCase): + """Test the formatting of text inside a Text widget. + + This is done with FormatParagraph.format.paragraph_event, + which calls funtions in the module as appropriate. + """ + test_string = ( + " '''this is a test of a reformat for a triple " + "quoted string will it reformat to less than 70 " + "characters for me?'''\n") + multiline_test_string = ( + " '''The first line is under the max width.\n" + " The second line's length is way over the max width. It goes " + "on and on until it is over 100 characters long.\n" + " Same thing with the third line. It is also way over the max " + "width, but FormatParagraph will fix it.\n" + " '''\n") + multiline_test_comment = ( + "# The first line is under the max width.\n" + "# The second line's length is way over the max width. It goes on " + "and on until it is over 100 characters long.\n" + "# Same thing with the third line. It is also way over the max " + "width, but FormatParagraph will fix it.\n" + "# The fourth line is short like the first line.") + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + editor = Editor(root=cls.root) + cls.text = editor.text.text # Test code does not need the wrapper. 
+ cls.formatter = fp.FormatParagraph(editor).format_paragraph_event + # Sets the insert mark just after the re-wrapped and inserted text. + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + + def test_short_line(self): + self.text.insert('1.0', "Short line\n") + self.formatter("Dummy") + self.assertEqual(self.text.get('1.0', 'insert'), "Short line\n" ) + self.text.delete('1.0', 'end') + + def test_long_line(self): + text = self.text + + # Set cursor ('insert' mark) to '1.0', within text. + text.insert('1.0', self.test_string) + text.mark_set('insert', '1.0') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + # find function includes \n + expected = ( +" '''this is a test of a reformat for a triple quoted string will it\n" +" reformat to less than 70 characters for me?'''\n") # yes + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + # Select from 1.11 to line end. + text.insert('1.0', self.test_string) + text.tag_add('sel', '1.11', '1.end') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + # selection excludes \n + expected = ( +" '''this is a test of a reformat for a triple quoted string will it reformat\n" +" to less than 70 characters for me?'''") # no + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + def test_multiple_lines(self): + text = self.text + # Select 2 long lines. + text.insert('1.0', self.multiline_test_string) + text.tag_add('sel', '2.0', '4.0') + self.formatter('ParameterDoesNothing') + result = text.get('2.0', 'insert') + expected = ( +" The second line's length is way over the max width. It goes on and\n" +" on until it is over 100 characters long. Same thing with the third\n" +" line. It is also way over the max width, but FormatParagraph will\n" +" fix it.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + def test_comment_block(self): + text = self.text + + # Set cursor ('insert') to '1.0', within block. + text.insert('1.0', self.multiline_test_comment) + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + expected = ( +"# The first line is under the max width. The second line's length is\n" +"# way over the max width. It goes on and on until it is over 100\n" +"# characters long. Same thing with the third line. It is also way over\n" +"# the max width, but FormatParagraph will fix it. The fourth line is\n" +"# short like the first line.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + # Select line 2, verify line 1 unaffected. + text.insert('1.0', self.multiline_test_comment) + text.tag_add('sel', '2.0', '3.0') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + expected = ( +"# The first line is under the max width.\n" +"# The second line's length is way over the max width. It goes on and\n" +"# on until it is over 100 characters long.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + +# The following block worked with EditorWindow but fails with the mock. +# Lines 2 and 3 get pasted together even though the previous block left +# the previous line alone. More investigation is needed. +## # Select lines 3 and 4 +## text.insert('1.0', self.multiline_test_comment) +## text.tag_add('sel', '3.0', '5.0') +## self.formatter('ParameterDoesNothing') +## result = text.get('3.0', 'insert') +## expected = ( +##"# Same thing with the third line. It is also way over the max width,\n" +##"# but FormatParagraph will fix it. 
The fourth line is short like the\n" +##"# first line.\n") +## self.assertEqual(result, expected) +## text.delete('1.0', 'end') + + +if __name__ == '__main__': + unittest.main(verbosity=2, exit=2) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -113,6 +113,9 @@ IDLE ---- +- Issue #18226: Add docstrings and unittests for FormatParagraph.py. + Original patches by Todd Rovito and Phil Webster. + - Issue #18279: Format - Strip trailing whitespace no longer marks a file as changed when it has not been changed. This fix followed the addition of a test file originally written by Phil Webster (the issue's main goal). -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 22:57:30 2013 From: python-checkins at python.org (terry.reedy) Date: Sat, 10 Aug 2013 22:57:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4MjI2?= =?utf-8?q?=3A_Add_docstrings_and_unittests_for_idlelib/FormatParagraph=2E?= =?utf-8?b?cHku?= Message-ID: <3cCFy23kshzPGX@mail.python.org> http://hg.python.org/cpython/rev/bfdb687ca485 changeset: 85106:bfdb687ca485 branch: 3.3 parent: 85101:80b65aa2d579 user: Terry Jan Reedy date: Sat Aug 10 16:56:28 2013 -0400 summary: Issue #18226: Add docstrings and unittests for idlelib/FormatParagraph.py. Move comment code to a separate function so it can be separately tested. Original patches by Todd Rovito and Phil Webster. files: Lib/idlelib/FormatParagraph.py | 134 ++- Lib/idlelib/idle_test/test_formatparagraph.py | 374 ++++++++++ Misc/NEWS | 3 + 3 files changed, 464 insertions(+), 47 deletions(-) diff --git a/Lib/idlelib/FormatParagraph.py b/Lib/idlelib/FormatParagraph.py --- a/Lib/idlelib/FormatParagraph.py +++ b/Lib/idlelib/FormatParagraph.py @@ -1,18 +1,19 @@ -# Extension to format a paragraph +"""Extension to format a paragraph or selection to a max width. -# Does basic, standard text formatting, and also understands Python -# comment blocks. Thus, for editing Python source code, this -# extension is really only suitable for reformatting these comment -# blocks or triple-quoted strings. +Does basic, standard text formatting, and also understands Python +comment blocks. Thus, for editing Python source code, this +extension is really only suitable for reformatting these comment +blocks or triple-quoted strings. -# Known problems with comment reformatting: -# * If there is a selection marked, and the first line of the -# selection is not complete, the block will probably not be detected -# as comments, and will have the normal "text formatting" rules -# applied. -# * If a comment block has leading whitespace that mixes tabs and -# spaces, they will not be considered part of the same block. -# * Fancy comments, like this bulleted list, arent handled :-) +Known problems with comment reformatting: +* If there is a selection marked, and the first line of the + selection is not complete, the block will probably not be detected + as comments, and will have the normal "text formatting" rules + applied. +* If a comment block has leading whitespace that mixes tabs and + spaces, they will not be considered part of the same block. +* Fancy comments, like this bulleted list, aren't handled :-) +""" import re from idlelib.configHandler import idleConf @@ -32,42 +33,31 @@ self.editwin = None def format_paragraph_event(self, event): - maxformatwidth = int(idleConf.GetOption('main', 'FormatParagraph', - 'paragraph', type='int')) + """Formats paragraph to a max width specified in idleConf. 
+ + If text is selected, format_paragraph_event will start breaking lines + at the max width, starting from the beginning selection. + + If no text is selected, format_paragraph_event uses the current + cursor location to determine the paragraph (lines of text surrounded + by blank lines) and formats it. + """ + maxformatwidth = idleConf.GetOption( + 'main', 'FormatParagraph', 'paragraph', type='int') text = self.editwin.text first, last = self.editwin.get_selection_indices() if first and last: data = text.get(first, last) - comment_header = '' + comment_header = get_comment_header(data) else: first, last, comment_header, data = \ find_paragraph(text, text.index("insert")) if comment_header: - # Reformat the comment lines - convert to text sans header. - lines = data.split("\n") - lines = map(lambda st, l=len(comment_header): st[l:], lines) - data = "\n".join(lines) - # Reformat to maxformatwidth chars or a 20 char width, - # whichever is greater. - format_width = max(maxformatwidth - len(comment_header), 20) - newdata = reformat_paragraph(data, format_width) - # re-split and re-insert the comment header. - newdata = newdata.split("\n") - # If the block ends in a \n, we dont want the comment - # prefix inserted after it. (Im not sure it makes sense to - # reformat a comment block that isnt made of complete - # lines, but whatever!) Can't think of a clean solution, - # so we hack away - block_suffix = "" - if not newdata[-1]: - block_suffix = "\n" - newdata = newdata[:-1] - builder = lambda item, prefix=comment_header: prefix+item - newdata = '\n'.join(map(builder, newdata)) + block_suffix + newdata = reformat_comment(data, maxformatwidth, comment_header) else: - # Just a normal text format newdata = reformat_paragraph(data, maxformatwidth) text.tag_remove("sel", "1.0", "end") + if newdata != data: text.mark_set("insert", first) text.undo_block_start() @@ -80,31 +70,44 @@ return "break" def find_paragraph(text, mark): + """Returns the start/stop indices enclosing the paragraph that mark is in. + + Also returns the comment format string, if any, and paragraph of text + between the start/stop indices. 
+ """ lineno, col = map(int, mark.split(".")) - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) + + # Look for start of next paragraph if the index passed in is a blank line while text.compare("%d.0" % lineno, "<", "end") and is_all_white(line): lineno = lineno + 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) first_lineno = lineno comment_header = get_comment_header(line) comment_header_len = len(comment_header) + + # Once start line found, search for end of paragraph (a blank line) while get_comment_header(line)==comment_header and \ not is_all_white(line[comment_header_len:]): lineno = lineno + 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) last = "%d.0" % lineno - # Search back to beginning of paragraph + + # Search back to beginning of paragraph (first blank line before) lineno = first_lineno - 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) while lineno > 0 and \ get_comment_header(line)==comment_header and \ not is_all_white(line[comment_header_len:]): lineno = lineno - 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) first = "%d.0" % (lineno+1) + return first, last, comment_header, text.get(first, last) +# This should perhaps be replaced with textwrap.wrap def reformat_paragraph(data, limit): + """Return data reformatted to specified width (limit).""" lines = data.split("\n") i = 0 n = len(lines) @@ -127,7 +130,7 @@ if not word: continue # Can happen when line ends in whitespace if len((partial + word).expandtabs()) > limit and \ - partial != indent1: + partial != indent1: new.append(partial.rstrip()) partial = indent2 partial = partial + word + " " @@ -139,13 +142,50 @@ new.extend(lines[i:]) return "\n".join(new) +def reformat_comment(data, limit, comment_header): + """Return data reformatted to specified width with comment header.""" + + # Remove header from the comment lines + lc = len(comment_header) + data = "\n".join(line[lc:] for line in data.split("\n")) + # Reformat to maxformatwidth chars or a 20 char width, + # whichever is greater. + format_width = max(limit - len(comment_header), 20) + newdata = reformat_paragraph(data, format_width) + # re-split and re-insert the comment header. + newdata = newdata.split("\n") + # If the block ends in a \n, we dont want the comment prefix + # inserted after it. (Im not sure it makes sense to reformat a + # comment block that is not made of complete lines, but whatever!) + # Can't think of a clean solution, so we hack away + block_suffix = "" + if not newdata[-1]: + block_suffix = "\n" + newdata = newdata[:-1] + return '\n'.join(comment_header+line for line in newdata) + block_suffix + def is_all_white(line): + """Return True if line is empty or all whitespace.""" + return re.match(r"^\s*$", line) is not None def get_indent(line): - return re.match(r"^(\s*)", line).group() + """Return the initial space or tab indent of line.""" + return re.match(r"^([ \t]*)", line).group() def get_comment_header(line): - m = re.match(r"^(\s*#*)", line) + """Return string with leading whitespace and '#' from line or ''. + + A null return indicates that the line is not a comment line. A non- + null return, such as ' #', will be used to find the other lines of + a comment block with the same indent. 
+ """ + m = re.match(r"^([ \t]*#*)", line) if m is None: return "" return m.group(1) + +if __name__ == "__main__": + from test import support; support.use_resources = ['gui'] + import unittest + unittest.main('idlelib.idle_test.test_formatparagraph', + verbosity=2, exit=False) diff --git a/Lib/idlelib/idle_test/test_formatparagraph.py b/Lib/idlelib/idle_test/test_formatparagraph.py new file mode 100644 --- /dev/null +++ b/Lib/idlelib/idle_test/test_formatparagraph.py @@ -0,0 +1,374 @@ +# Test the functions and main class method of FormatParagraph.py +import unittest +from idlelib import FormatParagraph as fp +from idlelib.EditorWindow import EditorWindow +from tkinter import Tk, Text, TclError +from test.support import requires + + +class Is_Get_Test(unittest.TestCase): + """Test the is_ and get_ functions""" + test_comment = '# This is a comment' + test_nocomment = 'This is not a comment' + trailingws_comment = '# This is a comment ' + leadingws_comment = ' # This is a comment' + leadingws_nocomment = ' This is not a comment' + + def test_is_all_white(self): + self.assertTrue(fp.is_all_white('')) + self.assertTrue(fp.is_all_white('\t\n\r\f\v')) + self.assertFalse(fp.is_all_white(self.test_comment)) + + def test_get_indent(self): + Equal = self.assertEqual + Equal(fp.get_indent(self.test_comment), '') + Equal(fp.get_indent(self.trailingws_comment), '') + Equal(fp.get_indent(self.leadingws_comment), ' ') + Equal(fp.get_indent(self.leadingws_nocomment), ' ') + + def test_get_comment_header(self): + Equal = self.assertEqual + # Test comment strings + Equal(fp.get_comment_header(self.test_comment), '#') + Equal(fp.get_comment_header(self.trailingws_comment), '#') + Equal(fp.get_comment_header(self.leadingws_comment), ' #') + # Test non-comment strings + Equal(fp.get_comment_header(self.leadingws_nocomment), ' ') + Equal(fp.get_comment_header(self.test_nocomment), '') + + +class FindTest(unittest.TestCase): + """Test the find_paragraph function in FormatParagraph. + + Using the runcase() function, find_paragraph() is called with 'mark' set at + multiple indexes before and inside the test paragraph. + + It appears that code with the same indentation as a quoted string is grouped + as part of the same paragraph, which is probably incorrect behavior. 
+ """ + + @classmethod + def setUpClass(cls): + from idlelib.idle_test.mock_tk import Text + cls.text = Text() + + def runcase(self, inserttext, stopline, expected): + # Check that find_paragraph returns the expected paragraph when + # the mark index is set to beginning, middle, end of each line + # up to but not including the stop line + text = self.text + text.insert('1.0', inserttext) + for line in range(1, stopline): + linelength = int(text.index("%d.end" % line).split('.')[1]) + for col in (0, linelength//2, linelength): + tempindex = "%d.%d" % (line, col) + self.assertEqual(fp.find_paragraph(text, tempindex), expected) + text.delete('1.0', 'end') + + def test_find_comment(self): + comment = ( + "# Comment block with no blank lines before\n" + "# Comment line\n" + "\n") + self.runcase(comment, 3, ('1.0', '3.0', '#', comment[0:58])) + + comment = ( + "\n" + "# Comment block with whitespace line before and after\n" + "# Comment line\n" + "\n") + self.runcase(comment, 4, ('2.0', '4.0', '#', comment[1:70])) + + comment = ( + "\n" + " # Indented comment block with whitespace before and after\n" + " # Comment line\n" + "\n") + self.runcase(comment, 4, ('2.0', '4.0', ' #', comment[1:82])) + + comment = ( + "\n" + "# Single line comment\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:23])) + + comment = ( + "\n" + " # Single line comment with leading whitespace\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', ' #', comment[1:51])) + + comment = ( + "\n" + "# Comment immediately followed by code\n" + "x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:40])) + + comment = ( + "\n" + " # Indented comment immediately followed by code\n" + "x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', ' #', comment[1:53])) + + comment = ( + "\n" + "# Comment immediately followed by indented code\n" + " x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:49])) + + def test_find_paragraph(self): + teststring = ( + '"""String with no blank lines before\n' + 'String line\n' + '"""\n' + '\n') + self.runcase(teststring, 4, ('1.0', '4.0', '', teststring[0:53])) + + teststring = ( + "\n" + '"""String with whitespace line before and after\n' + 'String line.\n' + '"""\n' + '\n') + self.runcase(teststring, 5, ('2.0', '5.0', '', teststring[1:66])) + + teststring = ( + '\n' + ' """Indented string with whitespace before and after\n' + ' Comment string.\n' + ' """\n' + '\n') + self.runcase(teststring, 5, ('2.0', '5.0', ' ', teststring[1:85])) + + teststring = ( + '\n' + '"""Single line string."""\n' + '\n') + self.runcase(teststring, 3, ('2.0', '3.0', '', teststring[1:27])) + + teststring = ( + '\n' + ' """Single line string with leading whitespace."""\n' + '\n') + self.runcase(teststring, 3, ('2.0', '3.0', ' ', teststring[1:55])) + + +class ReformatFunctionTest(unittest.TestCase): + """Test the reformat_paragraph function without the editor window.""" + + def test_reformat_paragrah(self): + Equal = self.assertEqual + reform = fp.reformat_paragraph + hw = "O hello world" + Equal(reform(' ', 1), ' ') + Equal(reform("Hello world", 20), "Hello world") + + # Test without leading newline + Equal(reform(hw, 1), "O\nhello\nworld") + Equal(reform(hw, 6), "O\nhello\nworld") + Equal(reform(hw, 7), "O hello\nworld") + Equal(reform(hw, 12), "O hello\nworld") + Equal(reform(hw, 13), "O hello world") + + # Test with leading newline + hw = "\nO hello world" + Equal(reform(hw, 1), "\nO\nhello\nworld") + Equal(reform(hw, 6), "\nO\nhello\nworld") + 
Equal(reform(hw, 7), "\nO hello\nworld") + Equal(reform(hw, 12), "\nO hello\nworld") + Equal(reform(hw, 13), "\nO hello world") + + +class ReformatCommentTest(unittest.TestCase): + """Test the reformat_comment function without the editor window.""" + + def test_reformat_comment(self): + Equal = self.assertEqual + + # reformat_comment formats to a minimum of 20 characters + test_string = ( + " \"\"\"this is a test of a reformat for a triple quoted string" + " will it reformat to less than 70 characters for me?\"\"\"") + result = fp.reformat_comment(test_string, 70, " ") + expected = ( + " \"\"\"this is a test of a reformat for a triple quoted string will it\n" + " reformat to less than 70 characters for me?\"\"\"") + Equal(result, expected) + + test_comment = ( + "# this is a test of a reformat for a triple quoted string will " + "it reformat to less than 70 characters for me?") + result = fp.reformat_comment(test_comment, 70, "#") + expected = ( + "# this is a test of a reformat for a triple quoted string will it\n" + "# reformat to less than 70 characters for me?") + Equal(result, expected) + + +class FormatClassTest(unittest.TestCase): + def test_init_close(self): + instance = fp.FormatParagraph('editor') + self.assertEqual(instance.editwin, 'editor') + instance.close() + self.assertEqual(instance.editwin, None) + + +# For testing format_paragraph_event, Initialize FormatParagraph with +# a mock Editor with .text and .get_selection_indices. The text must +# be a Text wrapper that adds two methods + +# A real EditorWindow creates unneeded, time-consuming baggage and +# sometimes emits shutdown warnings like this: +# "warning: callback failed in WindowList +# : invalid command name ".55131368.windows". +# Calling EditorWindow._close in tearDownClass prevents this but causes +# other problems (windows left open). + +class TextWrapper: + def __init__(self, master): + self.text = Text(master=master) + def __getattr__(self, name): + return getattr(self.text, name) + def undo_block_start(self): pass + def undo_block_stop(self): pass + +class Editor: + def __init__(self, root): + self.text = TextWrapper(root) + get_selection_indices = EditorWindow. get_selection_indices + +class FormatEventTest(unittest.TestCase): + """Test the formatting of text inside a Text widget. + + This is done with FormatParagraph.format.paragraph_event, + which calls funtions in the module as appropriate. + """ + test_string = ( + " '''this is a test of a reformat for a triple " + "quoted string will it reformat to less than 70 " + "characters for me?'''\n") + multiline_test_string = ( + " '''The first line is under the max width.\n" + " The second line's length is way over the max width. It goes " + "on and on until it is over 100 characters long.\n" + " Same thing with the third line. It is also way over the max " + "width, but FormatParagraph will fix it.\n" + " '''\n") + multiline_test_comment = ( + "# The first line is under the max width.\n" + "# The second line's length is way over the max width. It goes on " + "and on until it is over 100 characters long.\n" + "# Same thing with the third line. It is also way over the max " + "width, but FormatParagraph will fix it.\n" + "# The fourth line is short like the first line.") + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + editor = Editor(root=cls.root) + cls.text = editor.text.text # Test code does not need the wrapper. 
+ cls.formatter = fp.FormatParagraph(editor).format_paragraph_event + # Sets the insert mark just after the re-wrapped and inserted text. + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + + def test_short_line(self): + self.text.insert('1.0', "Short line\n") + self.formatter("Dummy") + self.assertEqual(self.text.get('1.0', 'insert'), "Short line\n" ) + self.text.delete('1.0', 'end') + + def test_long_line(self): + text = self.text + + # Set cursor ('insert' mark) to '1.0', within text. + text.insert('1.0', self.test_string) + text.mark_set('insert', '1.0') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + # find function includes \n + expected = ( +" '''this is a test of a reformat for a triple quoted string will it\n" +" reformat to less than 70 characters for me?'''\n") # yes + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + # Select from 1.11 to line end. + text.insert('1.0', self.test_string) + text.tag_add('sel', '1.11', '1.end') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + # selection excludes \n + expected = ( +" '''this is a test of a reformat for a triple quoted string will it reformat\n" +" to less than 70 characters for me?'''") # no + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + def test_multiple_lines(self): + text = self.text + # Select 2 long lines. + text.insert('1.0', self.multiline_test_string) + text.tag_add('sel', '2.0', '4.0') + self.formatter('ParameterDoesNothing') + result = text.get('2.0', 'insert') + expected = ( +" The second line's length is way over the max width. It goes on and\n" +" on until it is over 100 characters long. Same thing with the third\n" +" line. It is also way over the max width, but FormatParagraph will\n" +" fix it.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + def test_comment_block(self): + text = self.text + + # Set cursor ('insert') to '1.0', within block. + text.insert('1.0', self.multiline_test_comment) + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + expected = ( +"# The first line is under the max width. The second line's length is\n" +"# way over the max width. It goes on and on until it is over 100\n" +"# characters long. Same thing with the third line. It is also way over\n" +"# the max width, but FormatParagraph will fix it. The fourth line is\n" +"# short like the first line.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + # Select line 2, verify line 1 unaffected. + text.insert('1.0', self.multiline_test_comment) + text.tag_add('sel', '2.0', '3.0') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + expected = ( +"# The first line is under the max width.\n" +"# The second line's length is way over the max width. It goes on and\n" +"# on until it is over 100 characters long.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + +# The following block worked with EditorWindow but fails with the mock. +# Lines 2 and 3 get pasted together even though the previous block left +# the previous line alone. More investigation is needed. +## # Select lines 3 and 4 +## text.insert('1.0', self.multiline_test_comment) +## text.tag_add('sel', '3.0', '5.0') +## self.formatter('ParameterDoesNothing') +## result = text.get('3.0', 'insert') +## expected = ( +##"# Same thing with the third line. It is also way over the max width,\n" +##"# but FormatParagraph will fix it. 
The fourth line is short like the\n" +##"# first line.\n") +## self.assertEqual(result, expected) +## text.delete('1.0', 'end') + + +if __name__ == '__main__': + unittest.main(verbosity=2, exit=2) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -212,6 +212,9 @@ IDLE ---- +- Issue #18226: Add docstrings and unittests for FormatParagraph.py. + Original patches by Todd Rovito and Phil Webster. + - Issue #18279: Format - Strip trailing whitespace no longer marks a file as changed when it has not been changed. This fix followed the addition of a test file originally written by Phil Webster (the issue's main goal). -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 22:57:32 2013 From: python-checkins at python.org (terry.reedy) Date: Sat, 10 Aug 2013 22:57:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3cCFy40zQjzPlK@mail.python.org> http://hg.python.org/cpython/rev/b4a30d329f49 changeset: 85107:b4a30d329f49 parent: 85104:5d417257748e parent: 85106:bfdb687ca485 user: Terry Jan Reedy date: Sat Aug 10 16:57:02 2013 -0400 summary: Merge with 3.3 files: Lib/idlelib/FormatParagraph.py | 134 ++- Lib/idlelib/idle_test/test_formatparagraph.py | 374 ++++++++++ Misc/NEWS | 3 + 3 files changed, 464 insertions(+), 47 deletions(-) diff --git a/Lib/idlelib/FormatParagraph.py b/Lib/idlelib/FormatParagraph.py --- a/Lib/idlelib/FormatParagraph.py +++ b/Lib/idlelib/FormatParagraph.py @@ -1,18 +1,19 @@ -# Extension to format a paragraph +"""Extension to format a paragraph or selection to a max width. -# Does basic, standard text formatting, and also understands Python -# comment blocks. Thus, for editing Python source code, this -# extension is really only suitable for reformatting these comment -# blocks or triple-quoted strings. +Does basic, standard text formatting, and also understands Python +comment blocks. Thus, for editing Python source code, this +extension is really only suitable for reformatting these comment +blocks or triple-quoted strings. -# Known problems with comment reformatting: -# * If there is a selection marked, and the first line of the -# selection is not complete, the block will probably not be detected -# as comments, and will have the normal "text formatting" rules -# applied. -# * If a comment block has leading whitespace that mixes tabs and -# spaces, they will not be considered part of the same block. -# * Fancy comments, like this bulleted list, arent handled :-) +Known problems with comment reformatting: +* If there is a selection marked, and the first line of the + selection is not complete, the block will probably not be detected + as comments, and will have the normal "text formatting" rules + applied. +* If a comment block has leading whitespace that mixes tabs and + spaces, they will not be considered part of the same block. +* Fancy comments, like this bulleted list, aren't handled :-) +""" import re from idlelib.configHandler import idleConf @@ -32,42 +33,31 @@ self.editwin = None def format_paragraph_event(self, event): - maxformatwidth = int(idleConf.GetOption('main', 'FormatParagraph', - 'paragraph', type='int')) + """Formats paragraph to a max width specified in idleConf. + + If text is selected, format_paragraph_event will start breaking lines + at the max width, starting from the beginning selection. 
+ + If no text is selected, format_paragraph_event uses the current + cursor location to determine the paragraph (lines of text surrounded + by blank lines) and formats it. + """ + maxformatwidth = idleConf.GetOption( + 'main', 'FormatParagraph', 'paragraph', type='int') text = self.editwin.text first, last = self.editwin.get_selection_indices() if first and last: data = text.get(first, last) - comment_header = '' + comment_header = get_comment_header(data) else: first, last, comment_header, data = \ find_paragraph(text, text.index("insert")) if comment_header: - # Reformat the comment lines - convert to text sans header. - lines = data.split("\n") - lines = map(lambda st, l=len(comment_header): st[l:], lines) - data = "\n".join(lines) - # Reformat to maxformatwidth chars or a 20 char width, - # whichever is greater. - format_width = max(maxformatwidth - len(comment_header), 20) - newdata = reformat_paragraph(data, format_width) - # re-split and re-insert the comment header. - newdata = newdata.split("\n") - # If the block ends in a \n, we dont want the comment - # prefix inserted after it. (Im not sure it makes sense to - # reformat a comment block that isnt made of complete - # lines, but whatever!) Can't think of a clean solution, - # so we hack away - block_suffix = "" - if not newdata[-1]: - block_suffix = "\n" - newdata = newdata[:-1] - builder = lambda item, prefix=comment_header: prefix+item - newdata = '\n'.join(map(builder, newdata)) + block_suffix + newdata = reformat_comment(data, maxformatwidth, comment_header) else: - # Just a normal text format newdata = reformat_paragraph(data, maxformatwidth) text.tag_remove("sel", "1.0", "end") + if newdata != data: text.mark_set("insert", first) text.undo_block_start() @@ -80,31 +70,44 @@ return "break" def find_paragraph(text, mark): + """Returns the start/stop indices enclosing the paragraph that mark is in. + + Also returns the comment format string, if any, and paragraph of text + between the start/stop indices. 
+ """ lineno, col = map(int, mark.split(".")) - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) + + # Look for start of next paragraph if the index passed in is a blank line while text.compare("%d.0" % lineno, "<", "end") and is_all_white(line): lineno = lineno + 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) first_lineno = lineno comment_header = get_comment_header(line) comment_header_len = len(comment_header) + + # Once start line found, search for end of paragraph (a blank line) while get_comment_header(line)==comment_header and \ not is_all_white(line[comment_header_len:]): lineno = lineno + 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) last = "%d.0" % lineno - # Search back to beginning of paragraph + + # Search back to beginning of paragraph (first blank line before) lineno = first_lineno - 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) while lineno > 0 and \ get_comment_header(line)==comment_header and \ not is_all_white(line[comment_header_len:]): lineno = lineno - 1 - line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno) + line = text.get("%d.0" % lineno, "%d.end" % lineno) first = "%d.0" % (lineno+1) + return first, last, comment_header, text.get(first, last) +# This should perhaps be replaced with textwrap.wrap def reformat_paragraph(data, limit): + """Return data reformatted to specified width (limit).""" lines = data.split("\n") i = 0 n = len(lines) @@ -127,7 +130,7 @@ if not word: continue # Can happen when line ends in whitespace if len((partial + word).expandtabs()) > limit and \ - partial != indent1: + partial != indent1: new.append(partial.rstrip()) partial = indent2 partial = partial + word + " " @@ -139,13 +142,50 @@ new.extend(lines[i:]) return "\n".join(new) +def reformat_comment(data, limit, comment_header): + """Return data reformatted to specified width with comment header.""" + + # Remove header from the comment lines + lc = len(comment_header) + data = "\n".join(line[lc:] for line in data.split("\n")) + # Reformat to maxformatwidth chars or a 20 char width, + # whichever is greater. + format_width = max(limit - len(comment_header), 20) + newdata = reformat_paragraph(data, format_width) + # re-split and re-insert the comment header. + newdata = newdata.split("\n") + # If the block ends in a \n, we dont want the comment prefix + # inserted after it. (Im not sure it makes sense to reformat a + # comment block that is not made of complete lines, but whatever!) + # Can't think of a clean solution, so we hack away + block_suffix = "" + if not newdata[-1]: + block_suffix = "\n" + newdata = newdata[:-1] + return '\n'.join(comment_header+line for line in newdata) + block_suffix + def is_all_white(line): + """Return True if line is empty or all whitespace.""" + return re.match(r"^\s*$", line) is not None def get_indent(line): - return re.match(r"^(\s*)", line).group() + """Return the initial space or tab indent of line.""" + return re.match(r"^([ \t]*)", line).group() def get_comment_header(line): - m = re.match(r"^(\s*#*)", line) + """Return string with leading whitespace and '#' from line or ''. + + A null return indicates that the line is not a comment line. A non- + null return, such as ' #', will be used to find the other lines of + a comment block with the same indent. 
+ """ + m = re.match(r"^([ \t]*#*)", line) if m is None: return "" return m.group(1) + +if __name__ == "__main__": + from test import support; support.use_resources = ['gui'] + import unittest + unittest.main('idlelib.idle_test.test_formatparagraph', + verbosity=2, exit=False) diff --git a/Lib/idlelib/idle_test/test_formatparagraph.py b/Lib/idlelib/idle_test/test_formatparagraph.py new file mode 100644 --- /dev/null +++ b/Lib/idlelib/idle_test/test_formatparagraph.py @@ -0,0 +1,374 @@ +# Test the functions and main class method of FormatParagraph.py +import unittest +from idlelib import FormatParagraph as fp +from idlelib.EditorWindow import EditorWindow +from tkinter import Tk, Text, TclError +from test.support import requires + + +class Is_Get_Test(unittest.TestCase): + """Test the is_ and get_ functions""" + test_comment = '# This is a comment' + test_nocomment = 'This is not a comment' + trailingws_comment = '# This is a comment ' + leadingws_comment = ' # This is a comment' + leadingws_nocomment = ' This is not a comment' + + def test_is_all_white(self): + self.assertTrue(fp.is_all_white('')) + self.assertTrue(fp.is_all_white('\t\n\r\f\v')) + self.assertFalse(fp.is_all_white(self.test_comment)) + + def test_get_indent(self): + Equal = self.assertEqual + Equal(fp.get_indent(self.test_comment), '') + Equal(fp.get_indent(self.trailingws_comment), '') + Equal(fp.get_indent(self.leadingws_comment), ' ') + Equal(fp.get_indent(self.leadingws_nocomment), ' ') + + def test_get_comment_header(self): + Equal = self.assertEqual + # Test comment strings + Equal(fp.get_comment_header(self.test_comment), '#') + Equal(fp.get_comment_header(self.trailingws_comment), '#') + Equal(fp.get_comment_header(self.leadingws_comment), ' #') + # Test non-comment strings + Equal(fp.get_comment_header(self.leadingws_nocomment), ' ') + Equal(fp.get_comment_header(self.test_nocomment), '') + + +class FindTest(unittest.TestCase): + """Test the find_paragraph function in FormatParagraph. + + Using the runcase() function, find_paragraph() is called with 'mark' set at + multiple indexes before and inside the test paragraph. + + It appears that code with the same indentation as a quoted string is grouped + as part of the same paragraph, which is probably incorrect behavior. 
+ """ + + @classmethod + def setUpClass(cls): + from idlelib.idle_test.mock_tk import Text + cls.text = Text() + + def runcase(self, inserttext, stopline, expected): + # Check that find_paragraph returns the expected paragraph when + # the mark index is set to beginning, middle, end of each line + # up to but not including the stop line + text = self.text + text.insert('1.0', inserttext) + for line in range(1, stopline): + linelength = int(text.index("%d.end" % line).split('.')[1]) + for col in (0, linelength//2, linelength): + tempindex = "%d.%d" % (line, col) + self.assertEqual(fp.find_paragraph(text, tempindex), expected) + text.delete('1.0', 'end') + + def test_find_comment(self): + comment = ( + "# Comment block with no blank lines before\n" + "# Comment line\n" + "\n") + self.runcase(comment, 3, ('1.0', '3.0', '#', comment[0:58])) + + comment = ( + "\n" + "# Comment block with whitespace line before and after\n" + "# Comment line\n" + "\n") + self.runcase(comment, 4, ('2.0', '4.0', '#', comment[1:70])) + + comment = ( + "\n" + " # Indented comment block with whitespace before and after\n" + " # Comment line\n" + "\n") + self.runcase(comment, 4, ('2.0', '4.0', ' #', comment[1:82])) + + comment = ( + "\n" + "# Single line comment\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:23])) + + comment = ( + "\n" + " # Single line comment with leading whitespace\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', ' #', comment[1:51])) + + comment = ( + "\n" + "# Comment immediately followed by code\n" + "x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:40])) + + comment = ( + "\n" + " # Indented comment immediately followed by code\n" + "x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', ' #', comment[1:53])) + + comment = ( + "\n" + "# Comment immediately followed by indented code\n" + " x = 42\n" + "\n") + self.runcase(comment, 3, ('2.0', '3.0', '#', comment[1:49])) + + def test_find_paragraph(self): + teststring = ( + '"""String with no blank lines before\n' + 'String line\n' + '"""\n' + '\n') + self.runcase(teststring, 4, ('1.0', '4.0', '', teststring[0:53])) + + teststring = ( + "\n" + '"""String with whitespace line before and after\n' + 'String line.\n' + '"""\n' + '\n') + self.runcase(teststring, 5, ('2.0', '5.0', '', teststring[1:66])) + + teststring = ( + '\n' + ' """Indented string with whitespace before and after\n' + ' Comment string.\n' + ' """\n' + '\n') + self.runcase(teststring, 5, ('2.0', '5.0', ' ', teststring[1:85])) + + teststring = ( + '\n' + '"""Single line string."""\n' + '\n') + self.runcase(teststring, 3, ('2.0', '3.0', '', teststring[1:27])) + + teststring = ( + '\n' + ' """Single line string with leading whitespace."""\n' + '\n') + self.runcase(teststring, 3, ('2.0', '3.0', ' ', teststring[1:55])) + + +class ReformatFunctionTest(unittest.TestCase): + """Test the reformat_paragraph function without the editor window.""" + + def test_reformat_paragrah(self): + Equal = self.assertEqual + reform = fp.reformat_paragraph + hw = "O hello world" + Equal(reform(' ', 1), ' ') + Equal(reform("Hello world", 20), "Hello world") + + # Test without leading newline + Equal(reform(hw, 1), "O\nhello\nworld") + Equal(reform(hw, 6), "O\nhello\nworld") + Equal(reform(hw, 7), "O hello\nworld") + Equal(reform(hw, 12), "O hello\nworld") + Equal(reform(hw, 13), "O hello world") + + # Test with leading newline + hw = "\nO hello world" + Equal(reform(hw, 1), "\nO\nhello\nworld") + Equal(reform(hw, 6), "\nO\nhello\nworld") + 
Equal(reform(hw, 7), "\nO hello\nworld") + Equal(reform(hw, 12), "\nO hello\nworld") + Equal(reform(hw, 13), "\nO hello world") + + +class ReformatCommentTest(unittest.TestCase): + """Test the reformat_comment function without the editor window.""" + + def test_reformat_comment(self): + Equal = self.assertEqual + + # reformat_comment formats to a minimum of 20 characters + test_string = ( + " \"\"\"this is a test of a reformat for a triple quoted string" + " will it reformat to less than 70 characters for me?\"\"\"") + result = fp.reformat_comment(test_string, 70, " ") + expected = ( + " \"\"\"this is a test of a reformat for a triple quoted string will it\n" + " reformat to less than 70 characters for me?\"\"\"") + Equal(result, expected) + + test_comment = ( + "# this is a test of a reformat for a triple quoted string will " + "it reformat to less than 70 characters for me?") + result = fp.reformat_comment(test_comment, 70, "#") + expected = ( + "# this is a test of a reformat for a triple quoted string will it\n" + "# reformat to less than 70 characters for me?") + Equal(result, expected) + + +class FormatClassTest(unittest.TestCase): + def test_init_close(self): + instance = fp.FormatParagraph('editor') + self.assertEqual(instance.editwin, 'editor') + instance.close() + self.assertEqual(instance.editwin, None) + + +# For testing format_paragraph_event, Initialize FormatParagraph with +# a mock Editor with .text and .get_selection_indices. The text must +# be a Text wrapper that adds two methods + +# A real EditorWindow creates unneeded, time-consuming baggage and +# sometimes emits shutdown warnings like this: +# "warning: callback failed in WindowList +# : invalid command name ".55131368.windows". +# Calling EditorWindow._close in tearDownClass prevents this but causes +# other problems (windows left open). + +class TextWrapper: + def __init__(self, master): + self.text = Text(master=master) + def __getattr__(self, name): + return getattr(self.text, name) + def undo_block_start(self): pass + def undo_block_stop(self): pass + +class Editor: + def __init__(self, root): + self.text = TextWrapper(root) + get_selection_indices = EditorWindow. get_selection_indices + +class FormatEventTest(unittest.TestCase): + """Test the formatting of text inside a Text widget. + + This is done with FormatParagraph.format.paragraph_event, + which calls funtions in the module as appropriate. + """ + test_string = ( + " '''this is a test of a reformat for a triple " + "quoted string will it reformat to less than 70 " + "characters for me?'''\n") + multiline_test_string = ( + " '''The first line is under the max width.\n" + " The second line's length is way over the max width. It goes " + "on and on until it is over 100 characters long.\n" + " Same thing with the third line. It is also way over the max " + "width, but FormatParagraph will fix it.\n" + " '''\n") + multiline_test_comment = ( + "# The first line is under the max width.\n" + "# The second line's length is way over the max width. It goes on " + "and on until it is over 100 characters long.\n" + "# Same thing with the third line. It is also way over the max " + "width, but FormatParagraph will fix it.\n" + "# The fourth line is short like the first line.") + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + editor = Editor(root=cls.root) + cls.text = editor.text.text # Test code does not need the wrapper. 
+ cls.formatter = fp.FormatParagraph(editor).format_paragraph_event + # Sets the insert mark just after the re-wrapped and inserted text. + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + + def test_short_line(self): + self.text.insert('1.0', "Short line\n") + self.formatter("Dummy") + self.assertEqual(self.text.get('1.0', 'insert'), "Short line\n" ) + self.text.delete('1.0', 'end') + + def test_long_line(self): + text = self.text + + # Set cursor ('insert' mark) to '1.0', within text. + text.insert('1.0', self.test_string) + text.mark_set('insert', '1.0') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + # find function includes \n + expected = ( +" '''this is a test of a reformat for a triple quoted string will it\n" +" reformat to less than 70 characters for me?'''\n") # yes + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + # Select from 1.11 to line end. + text.insert('1.0', self.test_string) + text.tag_add('sel', '1.11', '1.end') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + # selection excludes \n + expected = ( +" '''this is a test of a reformat for a triple quoted string will it reformat\n" +" to less than 70 characters for me?'''") # no + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + def test_multiple_lines(self): + text = self.text + # Select 2 long lines. + text.insert('1.0', self.multiline_test_string) + text.tag_add('sel', '2.0', '4.0') + self.formatter('ParameterDoesNothing') + result = text.get('2.0', 'insert') + expected = ( +" The second line's length is way over the max width. It goes on and\n" +" on until it is over 100 characters long. Same thing with the third\n" +" line. It is also way over the max width, but FormatParagraph will\n" +" fix it.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + def test_comment_block(self): + text = self.text + + # Set cursor ('insert') to '1.0', within block. + text.insert('1.0', self.multiline_test_comment) + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + expected = ( +"# The first line is under the max width. The second line's length is\n" +"# way over the max width. It goes on and on until it is over 100\n" +"# characters long. Same thing with the third line. It is also way over\n" +"# the max width, but FormatParagraph will fix it. The fourth line is\n" +"# short like the first line.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + + # Select line 2, verify line 1 unaffected. + text.insert('1.0', self.multiline_test_comment) + text.tag_add('sel', '2.0', '3.0') + self.formatter('ParameterDoesNothing') + result = text.get('1.0', 'insert') + expected = ( +"# The first line is under the max width.\n" +"# The second line's length is way over the max width. It goes on and\n" +"# on until it is over 100 characters long.\n") + self.assertEqual(result, expected) + text.delete('1.0', 'end') + +# The following block worked with EditorWindow but fails with the mock. +# Lines 2 and 3 get pasted together even though the previous block left +# the previous line alone. More investigation is needed. +## # Select lines 3 and 4 +## text.insert('1.0', self.multiline_test_comment) +## text.tag_add('sel', '3.0', '5.0') +## self.formatter('ParameterDoesNothing') +## result = text.get('3.0', 'insert') +## expected = ( +##"# Same thing with the third line. It is also way over the max width,\n" +##"# but FormatParagraph will fix it. 
The fourth line is short like the\n" +##"# first line.\n") +## self.assertEqual(result, expected) +## text.delete('1.0', 'end') + + +if __name__ == '__main__': + unittest.main(verbosity=2, exit=2) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -770,6 +770,9 @@ IDLE ---- +- Issue #18226: Add docstrings and unittests for FormatParagraph.py. + Original patches by Todd Rovito and Phil Webster. + - Issue #18279: Format - Strip trailing whitespace no longer marks a file as changed when it has not been changed. This fix followed the addition of a test file originally written by Phil Webster (the issue's main goal). -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 23:17:03 2013 From: python-checkins at python.org (nick.coghlan) Date: Sat, 10 Aug 2013 23:17:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Tweak_name_rules_for_export_g?= =?utf-8?q?roups_and_extensions?= Message-ID: <3cCGNb5DyPzMfc@mail.python.org> http://hg.python.org/peps/rev/f9e60037fe47 changeset: 5049:f9e60037fe47 user: Nick Coghlan date: Sat Aug 10 17:16:46 2013 -0400 summary: Tweak name rules for export groups and extensions files: pep-0426.txt | 18 +++++++++++++++--- pep-0426/pydist-schema.json | 16 ++++++++++++++-- 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/pep-0426.txt b/pep-0426.txt --- a/pep-0426.txt +++ b/pep-0426.txt @@ -259,6 +259,10 @@ and the qualified name of the object relative to the containing module or package. +A "prefixed name" starts with a qualified name, but is not necessarily a +qualified name - it may contain additional dot separated segments which are +not valid identifiers. + Integration and deployment of distributions ------------------------------------------- @@ -1591,7 +1595,7 @@ Example:: "commands": { - "wrap_console": [{"wrapwithpython": "chair.run_cli"}], + "wrap_console": [{"wrapwithpython": "chair:run_cli"}], "wrap_gui": [{"wrapwithpythonw": "chair:run_gui"}], "prebuilt": ["notawrapper"] } @@ -1601,7 +1605,7 @@ Exports ------- -The ``exports`` field is a mapping containing qualified names as keys. Each +The ``exports`` field is a mapping containing prefixed names as keys. Each key identifies an export group containing one or more exports published by the distribution. @@ -1624,6 +1628,14 @@ distribution to determine whether or not an export is relevant without needing to import every exporting module. +Example:: + + "exports": { + "nose.plugins.0.10": { + "chairtest": "chair:NosePlugin" + } + } + Install hooks ============= @@ -1738,7 +1750,7 @@ =================== Extensions to the metadata may be present in a mapping under the -'extensions' key. The keys must be valid qualified names, while +'extensions' key. 
The keys must be valid prefixed names, while the values may be any type natively supported in JSON:: "extensions" : { diff --git a/pep-0426/pydist-schema.json b/pep-0426/pydist-schema.json --- a/pep-0426/pydist-schema.json +++ b/pep-0426/pydist-schema.json @@ -191,7 +191,8 @@ }, "extensions": { "description": "Extensions to the metadata may be present in a mapping under the 'extensions' key.", - "type": "object" + "type": "object", + "$ref": "#/definitions/extensions" } }, @@ -271,7 +272,7 @@ "exports": { "type": "object", "patternProperties": { - "^[A-Za-z]([0-9A-Za-z_])*([.][A-Za-z]([0-9A-Za-z_])*)*$": { + "^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": { "type": "object", "patternProperties": { ".": { @@ -284,6 +285,13 @@ }, "additionalProperties": false }, + "extensions": { + "type": "object", + "patternProperties": { + "^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": {} + }, + "additionalProperties": false + }, "command_map": { "type": "object", "patternProperties": { @@ -324,6 +332,10 @@ "qualified_name" : { "type": "string", "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*$" + }, + "prefixed_name" : { + "type": "string", + "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_0-9]*)*$" } } } -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 10 23:47:11 2013 From: python-checkins at python.org (terry.reedy) Date: Sat, 10 Aug 2013 23:47:11 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NDI5?= =?utf-8?q?=3A_Add_user-oriented_News_entry_about_Format_/_Format_Paragrap?= =?utf-8?q?h?= Message-ID: <3cCH3M4d26zPGX@mail.python.org> http://hg.python.org/cpython/rev/841984e96df6 changeset: 85108:841984e96df6 branch: 2.7 parent: 85105:453b4f89a2b4 user: Terry Jan Reedy date: Sat Aug 10 17:46:16 2013 -0400 summary: Issue #18429: Add user-oriented News entry about Format / Format Paragraph now working with comment block selections. Patch was part of 18226 patch. files: Misc/NEWS | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -113,6 +113,10 @@ IDLE ---- +- Issue #18429: Format / Format Paragraph, now works when comment blocks + are selected. As with text blocks, this works best when the selection + only includes complete lines. + - Issue #18226: Add docstrings and unittests for FormatParagraph.py. Original patches by Todd Rovito and Phil Webster. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 23:47:12 2013 From: python-checkins at python.org (terry.reedy) Date: Sat, 10 Aug 2013 23:47:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NDI5?= =?utf-8?q?=3A_Add_user-oriented_News_entry_about_Format_/_Format_Paragrap?= =?utf-8?q?h?= Message-ID: <3cCH3N6YyczPYS@mail.python.org> http://hg.python.org/cpython/rev/9eea6401b892 changeset: 85109:9eea6401b892 branch: 3.3 parent: 85106:bfdb687ca485 user: Terry Jan Reedy date: Sat Aug 10 17:46:24 2013 -0400 summary: Issue #18429: Add user-oriented News entry about Format / Format Paragraph now working with comment block selections. Patch was part of 18226 patch. files: Misc/NEWS | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -212,6 +212,10 @@ IDLE ---- +- Issue #18429: Format / Format Paragraph, now works when comment blocks + are selected. As with text blocks, this works best when the selection + only includes complete lines. 
+ - Issue #18226: Add docstrings and unittests for FormatParagraph.py. Original patches by Todd Rovito and Phil Webster. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 10 23:47:14 2013 From: python-checkins at python.org (terry.reedy) Date: Sat, 10 Aug 2013 23:47:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3cCH3Q1S4czQ5y@mail.python.org> http://hg.python.org/cpython/rev/0fce8b90f25b changeset: 85110:0fce8b90f25b parent: 85107:b4a30d329f49 parent: 85109:9eea6401b892 user: Terry Jan Reedy date: Sat Aug 10 17:46:48 2013 -0400 summary: Merge with 3.3 files: Misc/NEWS | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -770,6 +770,10 @@ IDLE ---- +- Issue #18429: Format / Format Paragraph, now works when comment blocks + are selected. As with text blocks, this works best when the selection + only includes complete lines. + - Issue #18226: Add docstrings and unittests for FormatParagraph.py. Original patches by Todd Rovito and Phil Webster. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 00:22:40 2013 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 11 Aug 2013 00:22:40 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fix_refcounting_issue_with?= =?utf-8?q?_extension_types_in_tkinter=2E?= Message-ID: <3cCHrJ2qrpzPJr@mail.python.org> http://hg.python.org/cpython/rev/4d0c938870bc changeset: 85111:4d0c938870bc user: Antoine Pitrou date: Sun Aug 11 00:22:30 2013 +0200 summary: Fix refcounting issue with extension types in tkinter. (issue #15721) files: Misc/NEWS | 2 ++ Modules/_tkinter.c | 9 +++++++++ 2 files changed, 11 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,6 +24,8 @@ Library ------- +- Fix refcounting issue with extension types in tkinter. + - Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error if methods have annotations; it now correctly displays the annotations. 
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -516,6 +516,7 @@ v = PyObject_New(TkappObject, (PyTypeObject *) Tkapp_Type); if (v == NULL) return NULL; + Py_INCREF(Tkapp_Type); v->interp = Tcl_CreateInterp(); v->wantobjects = wantobjects; @@ -674,6 +675,7 @@ self = PyObject_New(PyTclObject, (PyTypeObject *) PyTclObject_Type); if (self == NULL) return NULL; + Py_INCREF(PyTclObject_Type); Tcl_IncrRefCount(arg); self->value = arg; self->string = NULL; @@ -683,9 +685,11 @@ static void PyTclObject_dealloc(PyTclObject *self) { + PyObject *tp = (PyObject *) Py_TYPE(self); Tcl_DecrRefCount(self->value); Py_XDECREF(self->string); PyObject_Del(self); + Py_DECREF(tp); } static char* @@ -2196,6 +2200,7 @@ v = PyObject_New(TkttObject, (PyTypeObject *) Tktt_Type); if (v == NULL) return NULL; + Py_INCREF(Tktt_Type); Py_INCREF(func); v->token = NULL; @@ -2211,10 +2216,12 @@ { TkttObject *v = (TkttObject *)self; PyObject *func = v->func; + PyObject *tp = (PyObject *) Py_TYPE(self); Py_XDECREF(func); PyObject_Del(self); + Py_DECREF(tp); } static PyObject * @@ -2520,11 +2527,13 @@ static void Tkapp_Dealloc(PyObject *self) { + PyObject *tp = (PyObject *) Py_TYPE(self); /*CHECK_TCL_APPARTMENT;*/ ENTER_TCL Tcl_DeleteInterp(Tkapp_Interp(self)); LEAVE_TCL PyObject_Del(self); + Py_DECREF(tp); DisableEventHook(); } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 00:31:15 2013 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 11 Aug 2013 00:31:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2310241=3A_Clear_ex?= =?utf-8?q?tension_module_dict_copies_at_interpreter_shutdown=2E?= Message-ID: <3cCJ2C2j6DzPtN@mail.python.org> http://hg.python.org/cpython/rev/1edff836c954 changeset: 85112:1edff836c954 user: Antoine Pitrou date: Sun Aug 11 00:30:09 2013 +0200 summary: Issue #10241: Clear extension module dict copies at interpreter shutdown. Patch by Neil Schemenauer, minimally modified. (re-apply after fix for tkinter-related crash) files: Include/pystate.h | 3 +++ Misc/NEWS | 3 +++ Python/import.c | 2 ++ Python/pystate.c | 25 +++++++++++++++++++++++++ 4 files changed, 33 insertions(+), 0 deletions(-) diff --git a/Include/pystate.h b/Include/pystate.h --- a/Include/pystate.h +++ b/Include/pystate.h @@ -134,6 +134,9 @@ PyAPI_FUNC(int) PyState_RemoveModule(struct PyModuleDef*); #endif PyAPI_FUNC(PyObject*) PyState_FindModule(struct PyModuleDef*); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyState_ClearModules(void); +#endif PyAPI_FUNC(PyThreadState *) PyThreadState_New(PyInterpreterState *); PyAPI_FUNC(PyThreadState *) _PyThreadState_Prealloc(PyInterpreterState *); diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -54,6 +54,9 @@ - Issue #17899: Fix rare file descriptor leak in os.listdir(). +- Issue #10241: Clear extension module dict copies at interpreter shutdown. + Patch by Neil Schemenauer, minimally modified. + - Issue #9035: ismount now recognises volumes mounted below a drive root on Windows. Original patch by Atsuo Ishimoto. 
diff --git a/Python/import.c b/Python/import.c --- a/Python/import.c +++ b/Python/import.c @@ -380,6 +380,8 @@ builtins = interp->builtins; interp->builtins = PyDict_New(); Py_DECREF(builtins); + /* Clear module dict copies stored in the interpreter state */ + _PyState_ClearModules(); /* Collect references */ _PyGC_CollectNoFail(); /* Dump GC stats before it's too late, since it uses the warnings diff --git a/Python/pystate.c b/Python/pystate.c --- a/Python/pystate.c +++ b/Python/pystate.c @@ -320,6 +320,31 @@ return PyList_SetItem(state->modules_by_index, index, Py_None); } +/* used by import.c:PyImport_Cleanup */ +void +_PyState_ClearModules(void) +{ + PyInterpreterState *state = PyThreadState_GET()->interp; + if (state->modules_by_index) { + Py_ssize_t i; + for (i = 0; i < PyList_GET_SIZE(state->modules_by_index); i++) { + PyObject *m = PyList_GET_ITEM(state->modules_by_index, i); + if (PyModule_Check(m)) { + /* cleanup the saved copy of module dicts */ + PyModuleDef *md = PyModule_GetDef(m); + if (md) + Py_CLEAR(md->m_base.m_copy); + } + } + /* Setting modules_by_index to NULL could be dangerous, so we + clear the list instead. */ + if (PyList_SetSlice(state->modules_by_index, + 0, PyList_GET_SIZE(state->modules_by_index), + NULL)) + PyErr_WriteUnraisable(state->modules_by_index); + } +} + void PyThreadState_Clear(PyThreadState *tstate) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 00:40:29 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 11 Aug 2013 00:40:29 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4Njc2?= =?utf-8?q?=3A_Change_=27positive=27_to_=27non-negative=27_in_queue=2Epy_p?= =?utf-8?q?ut_and_get?= Message-ID: <3cCJDs434GzQ02@mail.python.org> http://hg.python.org/cpython/rev/737b53ec5d1a changeset: 85113:737b53ec5d1a branch: 2.7 parent: 85108:841984e96df6 user: Terry Jan Reedy date: Sat Aug 10 18:17:01 2013 -0400 summary: Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get docstrings and ValueError messages. Patch by Zhongyue Luo files: Lib/Queue.py | 8 ++++---- Misc/ACKS | 1 + Misc/NEWS | 3 +++ 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/Lib/Queue.py b/Lib/Queue.py --- a/Lib/Queue.py +++ b/Lib/Queue.py @@ -109,7 +109,7 @@ If optional args 'block' is true and 'timeout' is None (the default), block if necessary until a free slot is available. If 'timeout' is - a positive number, it blocks at most 'timeout' seconds and raises + a non-negative number, it blocks at most 'timeout' seconds and raises the Full exception if no free slot was available within that time. Otherwise ('block' is false), put an item on the queue if a free slot is immediately available, else raise the Full exception ('timeout' @@ -125,7 +125,7 @@ while self._qsize() == self.maxsize: self.not_full.wait() elif timeout < 0: - raise ValueError("'timeout' must be a positive number") + raise ValueError("'timeout' must be a non-negative number") else: endtime = _time() + timeout while self._qsize() == self.maxsize: @@ -152,7 +152,7 @@ If optional args 'block' is true and 'timeout' is None (the default), block if necessary until an item is available. If 'timeout' is - a positive number, it blocks at most 'timeout' seconds and raises + a non-negative number, it blocks at most 'timeout' seconds and raises the Empty exception if no item was available within that time. 
Otherwise ('block' is false), return an item if one is immediately available, else raise the Empty exception ('timeout' is ignored @@ -167,7 +167,7 @@ while not self._qsize(): self.not_empty.wait() elif timeout < 0: - raise ValueError("'timeout' must be a positive number") + raise ValueError("'timeout' must be a non-negative number") else: endtime = _time() + timeout while not self._qsize(): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -629,6 +629,7 @@ Lukas Lueg Loren Luke Fredrik Lundh +Zhongyue Luo Mark Lutz Jim Lynch Mikael Lyngvig diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,9 @@ Library ------- +- Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get + docstrings and ValueError messages. Patch by Zhongyue Luo + - Issue #17998: Fix an internal error in regular expression engine. - Issue #17557: Fix os.getgroups() to work with the modified behavior of -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 00:40:30 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 11 Aug 2013 00:40:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4Njc2?= =?utf-8?q?=3A_Change_=27positive=27_to_=27non-negative=27_in_queue=2Epy_p?= =?utf-8?q?ut_and_get?= Message-ID: <3cCJDt65KCzQkv@mail.python.org> http://hg.python.org/cpython/rev/2122d56d6bc5 changeset: 85114:2122d56d6bc5 branch: 3.3 parent: 85109:9eea6401b892 user: Terry Jan Reedy date: Sat Aug 10 18:17:13 2013 -0400 summary: Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get docstrings and ValueError messages. Patch by Zhongyue Luo files: Lib/queue.py | 8 ++++---- Misc/ACKS | 1 + Misc/NEWS | 3 +++ 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/Lib/queue.py b/Lib/queue.py --- a/Lib/queue.py +++ b/Lib/queue.py @@ -120,7 +120,7 @@ If optional args 'block' is true and 'timeout' is None (the default), block if necessary until a free slot is available. If 'timeout' is - a positive number, it blocks at most 'timeout' seconds and raises + a non-negative number, it blocks at most 'timeout' seconds and raises the Full exception if no free slot was available within that time. Otherwise ('block' is false), put an item on the queue if a free slot is immediately available, else raise the Full exception ('timeout' @@ -135,7 +135,7 @@ while self._qsize() >= self.maxsize: self.not_full.wait() elif timeout < 0: - raise ValueError("'timeout' must be a positive number") + raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while self._qsize() >= self.maxsize: @@ -152,7 +152,7 @@ If optional args 'block' is true and 'timeout' is None (the default), block if necessary until an item is available. If 'timeout' is - a positive number, it blocks at most 'timeout' seconds and raises + a non-negative number, it blocks at most 'timeout' seconds and raises the Empty exception if no item was available within that time. 
Otherwise ('block' is false), return an item if one is immediately available, else raise the Empty exception ('timeout' is ignored @@ -166,7 +166,7 @@ while not self._qsize(): self.not_empty.wait() elif timeout < 0: - raise ValueError("'timeout' must be a positive number") + raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while not self._qsize(): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -753,6 +753,7 @@ Lukas Lueg Loren Luke Fredrik Lundh +Zhongyue Luo Mark Lutz Jim Lynch Mikael Lyngvig diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -64,6 +64,9 @@ Library ------- +- Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get + docstrings and ValueError messages. Patch by Zhongyue Luo + - Issue #18681: Fix a NameError in imp.reload() (noticed by Weizhao Li). - Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 00:40:32 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 11 Aug 2013 00:40:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318676=3A_Merge_from_3=2E3?= Message-ID: <3cCJDw1C2DzQkv@mail.python.org> http://hg.python.org/cpython/rev/d9a9fe5e700d changeset: 85115:d9a9fe5e700d parent: 85110:0fce8b90f25b parent: 85114:2122d56d6bc5 user: Terry Jan Reedy date: Sat Aug 10 18:23:18 2013 -0400 summary: Issue #18676: Merge from 3.3 files: Lib/queue.py | 8 ++++---- Misc/ACKS | 1 + Misc/NEWS | 5 ++++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/Lib/queue.py b/Lib/queue.py --- a/Lib/queue.py +++ b/Lib/queue.py @@ -120,7 +120,7 @@ If optional args 'block' is true and 'timeout' is None (the default), block if necessary until a free slot is available. If 'timeout' is - a positive number, it blocks at most 'timeout' seconds and raises + a non-negative number, it blocks at most 'timeout' seconds and raises the Full exception if no free slot was available within that time. Otherwise ('block' is false), put an item on the queue if a free slot is immediately available, else raise the Full exception ('timeout' @@ -135,7 +135,7 @@ while self._qsize() >= self.maxsize: self.not_full.wait() elif timeout < 0: - raise ValueError("'timeout' must be a positive number") + raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while self._qsize() >= self.maxsize: @@ -152,7 +152,7 @@ If optional args 'block' is true and 'timeout' is None (the default), block if necessary until an item is available. If 'timeout' is - a positive number, it blocks at most 'timeout' seconds and raises + a non-negative number, it blocks at most 'timeout' seconds and raises the Empty exception if no item was available within that time. 
Otherwise ('block' is false), return an item if one is immediately available, else raise the Empty exception ('timeout' is ignored @@ -166,7 +166,7 @@ while not self._qsize(): self.not_empty.wait() elif timeout < 0: - raise ValueError("'timeout' must be a positive number") + raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while not self._qsize(): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -773,6 +773,7 @@ Lukas Lueg Loren Luke Fredrik Lundh +Zhongyue Luo Mark Lutz Taras Lyapun Jim Lynch diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,6 +24,9 @@ Library ------- +- Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get + docstrings and ValueError messages. Patch by Zhongyue Luo + - Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error if methods have annotations; it now correctly displays the annotations. @@ -821,7 +824,7 @@ Build ----- -- Issue #16067: Add description into MSI file to replace installer's +- Issue #16067: Add description into MSI file to replace installer's temporary name. - Issue #18257: Fix readlink usage in python-config. Install the python -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 00:40:33 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 11 Aug 2013 00:40:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?q?=29=3A_Merge_heads?= Message-ID: <3cCJDx39PXzQns@mail.python.org> http://hg.python.org/cpython/rev/febe4f36e020 changeset: 85116:febe4f36e020 parent: 85115:d9a9fe5e700d parent: 85111:4d0c938870bc user: Terry Jan Reedy date: Sat Aug 10 18:33:37 2013 -0400 summary: Merge heads files: Misc/NEWS | 2 ++ Modules/_tkinter.c | 9 +++++++++ 2 files changed, 11 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -27,6 +27,8 @@ - Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get docstrings and ValueError messages. Patch by Zhongyue Luo +- Fix refcounting issue with extension types in tkinter. + - Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error if methods have annotations; it now correctly displays the annotations. 
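The Issue #18676 entry in the hunk above is purely a wording fix: put() and get() have always accepted a timeout of zero, and only negative values are rejected. A quick illustration of the documented behaviour (Python 3 spelling of the module; this snippet is not part of the patch):

    import queue

    q = queue.Queue(maxsize=1)
    q.put("first")

    try:
        q.put("second", timeout=-1)     # negative timeouts are rejected up front
    except ValueError as exc:
        print(exc)                      # 'timeout' must be a non-negative number

    try:
        q.put("second", timeout=0.1)    # waits at most 0.1 s, then gives up
    except queue.Full:
        print("queue is still full")

The same merge also carries the _tkinter refcounting change; its diff follows.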
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -516,6 +516,7 @@ v = PyObject_New(TkappObject, (PyTypeObject *) Tkapp_Type); if (v == NULL) return NULL; + Py_INCREF(Tkapp_Type); v->interp = Tcl_CreateInterp(); v->wantobjects = wantobjects; @@ -674,6 +675,7 @@ self = PyObject_New(PyTclObject, (PyTypeObject *) PyTclObject_Type); if (self == NULL) return NULL; + Py_INCREF(PyTclObject_Type); Tcl_IncrRefCount(arg); self->value = arg; self->string = NULL; @@ -683,9 +685,11 @@ static void PyTclObject_dealloc(PyTclObject *self) { + PyObject *tp = (PyObject *) Py_TYPE(self); Tcl_DecrRefCount(self->value); Py_XDECREF(self->string); PyObject_Del(self); + Py_DECREF(tp); } static char* @@ -2196,6 +2200,7 @@ v = PyObject_New(TkttObject, (PyTypeObject *) Tktt_Type); if (v == NULL) return NULL; + Py_INCREF(Tktt_Type); Py_INCREF(func); v->token = NULL; @@ -2211,10 +2216,12 @@ { TkttObject *v = (TkttObject *)self; PyObject *func = v->func; + PyObject *tp = (PyObject *) Py_TYPE(self); Py_XDECREF(func); PyObject_Del(self); + Py_DECREF(tp); } static PyObject * @@ -2520,11 +2527,13 @@ static void Tkapp_Dealloc(PyObject *self) { + PyObject *tp = (PyObject *) Py_TYPE(self); /*CHECK_TCL_APPARTMENT;*/ ENTER_TCL Tcl_DeleteInterp(Tkapp_Interp(self)); LEAVE_TCL PyObject_Del(self); + Py_DECREF(tp); DisableEventHook(); } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 00:40:34 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 11 Aug 2013 00:40:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?q?=29=3A_Merge?= Message-ID: <3cCJDy5DGfzQkv@mail.python.org> http://hg.python.org/cpython/rev/3fd62c312f1a changeset: 85117:3fd62c312f1a parent: 85112:1edff836c954 parent: 85116:febe4f36e020 user: Terry Jan Reedy date: Sat Aug 10 18:40:04 2013 -0400 summary: Merge files: Lib/queue.py | 8 ++++---- Misc/ACKS | 1 + Misc/NEWS | 5 ++++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/Lib/queue.py b/Lib/queue.py --- a/Lib/queue.py +++ b/Lib/queue.py @@ -120,7 +120,7 @@ If optional args 'block' is true and 'timeout' is None (the default), block if necessary until a free slot is available. If 'timeout' is - a positive number, it blocks at most 'timeout' seconds and raises + a non-negative number, it blocks at most 'timeout' seconds and raises the Full exception if no free slot was available within that time. Otherwise ('block' is false), put an item on the queue if a free slot is immediately available, else raise the Full exception ('timeout' @@ -135,7 +135,7 @@ while self._qsize() >= self.maxsize: self.not_full.wait() elif timeout < 0: - raise ValueError("'timeout' must be a positive number") + raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while self._qsize() >= self.maxsize: @@ -152,7 +152,7 @@ If optional args 'block' is true and 'timeout' is None (the default), block if necessary until an item is available. If 'timeout' is - a positive number, it blocks at most 'timeout' seconds and raises + a non-negative number, it blocks at most 'timeout' seconds and raises the Empty exception if no item was available within that time. 
Otherwise ('block' is false), return an item if one is immediately available, else raise the Empty exception ('timeout' is ignored @@ -166,7 +166,7 @@ while not self._qsize(): self.not_empty.wait() elif timeout < 0: - raise ValueError("'timeout' must be a positive number") + raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while not self._qsize(): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -773,6 +773,7 @@ Lukas Lueg Loren Luke Fredrik Lundh +Zhongyue Luo Mark Lutz Taras Lyapun Jim Lynch diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,6 +24,9 @@ Library ------- +- Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get + docstrings and ValueError messages. Patch by Zhongyue Luo + - Fix refcounting issue with extension types in tkinter. - Issue #8112: xlmrpc.server's DocXMLRPCServer server no longer raises an error @@ -826,7 +829,7 @@ Build ----- -- Issue #16067: Add description into MSI file to replace installer's +- Issue #16067: Add description into MSI file to replace installer's temporary name. - Issue #18257: Fix readlink usage in python-config. Install the python -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 02:59:19 2013 From: python-checkins at python.org (terry.reedy) Date: Sun, 11 Aug 2013 02:59:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2315301=3A_skip_new?= =?utf-8?q?_test_method_so_Windows_builtbots_stop_failing=2E?= Message-ID: <3cCMK32h4RzQkv@mail.python.org> http://hg.python.org/cpython/rev/9b0e9e9812f8 changeset: 85118:9b0e9e9812f8 user: Terry Jan Reedy date: Sat Aug 10 20:58:59 2013 -0400 summary: Issue #15301: skip new test method so Windows builtbots stop failing. files: Lib/test/test_os.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -867,6 +867,7 @@ os.makedirs(path, mode=mode, exist_ok=True) os.umask(old_mask) + @unittest.skipUnless(hasattr(os, 'chown'), 'test needs os.chown') def test_chown_uid_gid_arguments_must_be_index(self): stat = os.stat(support.TESTFN) uid = stat.st_uid -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sun Aug 11 05:47:37 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 11 Aug 2013 05:47:37 +0200 Subject: [Python-checkins] Daily reference leaks (9b0e9e9812f8): sum=-1 Message-ID: results for 9b0e9e9812f8 on branch "default" -------------------------------------------- test_support leaked [0, 0, -1] references, sum=-1 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogAEkz_q', '-x'] From python-checkins at python.org Sun Aug 11 12:07:05 2013 From: python-checkins at python.org (ezio.melotti) Date: Sun, 11 Aug 2013 12:07:05 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NjYzOiBkb2N1?= =?utf-8?q?ment_that_assertAlmostEqual_also_works_when_the_values_are_equa?= =?utf-8?q?l?= Message-ID: <3cCbT51F0Rz7Lk6@mail.python.org> http://hg.python.org/cpython/rev/e0f86c3b3685 changeset: 85119:e0f86c3b3685 branch: 3.3 parent: 85114:2122d56d6bc5 user: Ezio Melotti date: Sun Aug 11 13:04:50 2013 +0300 summary: #18663: document that assertAlmostEqual also works when the values are equal and add tests. 
files: Doc/library/unittest.rst | 2 +- Lib/unittest/test/test_assertions.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletions(-) diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -1000,7 +1000,7 @@ like the :func:`round` function) and not *significant digits*. If *delta* is supplied instead of *places* then the difference - between *first* and *second* must be less (or more) than *delta*. + between *first* and *second* must be less or equal to (or greater than) *delta*. Supplying both *delta* and *places* raises a ``TypeError``. diff --git a/Lib/unittest/test/test_assertions.py b/Lib/unittest/test/test_assertions.py --- a/Lib/unittest/test/test_assertions.py +++ b/Lib/unittest/test/test_assertions.py @@ -34,6 +34,10 @@ self.assertNotAlmostEqual(1.1, 1.0, delta=0.05) self.assertNotAlmostEqual(1.0, 1.1, delta=0.05) + self.assertAlmostEqual(1.0, 1.0, delta=0.5) + self.assertRaises(self.failureException, self.assertNotAlmostEqual, + 1.0, 1.0, delta=0.5) + self.assertRaises(self.failureException, self.assertAlmostEqual, 1.1, 1.0, delta=0.05) self.assertRaises(self.failureException, self.assertNotAlmostEqual, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 12:07:06 2013 From: python-checkins at python.org (ezio.melotti) Date: Sun, 11 Aug 2013 12:07:06 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NjYzOiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cCbT634gjz7LkJ@mail.python.org> http://hg.python.org/cpython/rev/eeda59e08c83 changeset: 85120:eeda59e08c83 parent: 85118:9b0e9e9812f8 parent: 85119:e0f86c3b3685 user: Ezio Melotti date: Sun Aug 11 13:05:37 2013 +0300 summary: #18663: merge with 3.3. files: Doc/library/unittest.rst | 2 +- Lib/unittest/test/test_assertions.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletions(-) diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -1077,7 +1077,7 @@ like the :func:`round` function) and not *significant digits*. If *delta* is supplied instead of *places* then the difference - between *first* and *second* must be less (or more) than *delta*. + between *first* and *second* must be less or equal to (or greater than) *delta*. Supplying both *delta* and *places* raises a ``TypeError``. 
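Put differently, assertAlmostEqual() with *delta* succeeds when abs(first - second) <= delta, which includes the case of exactly equal values, while assertNotAlmostEqual() requires the difference to exceed *delta*; the test diff that follows adds precisely that equal-values case. As a stand-alone sketch of the same rules (class and method names invented for illustration):

    import unittest

    class DeltaSemantics(unittest.TestCase):
        def test_delta(self):
            self.assertAlmostEqual(1.0, 1.0, delta=0.5)       # difference 0.0 is <= delta
            self.assertAlmostEqual(1.1, 1.0, delta=0.5)       # ~0.1 is still within delta
            self.assertNotAlmostEqual(1.1, 1.0, delta=0.05)   # ~0.1 exceeds delta

    if __name__ == "__main__":
        unittest.main()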
diff --git a/Lib/unittest/test/test_assertions.py b/Lib/unittest/test/test_assertions.py --- a/Lib/unittest/test/test_assertions.py +++ b/Lib/unittest/test/test_assertions.py @@ -34,6 +34,10 @@ self.assertNotAlmostEqual(1.1, 1.0, delta=0.05) self.assertNotAlmostEqual(1.0, 1.1, delta=0.05) + self.assertAlmostEqual(1.0, 1.0, delta=0.5) + self.assertRaises(self.failureException, self.assertNotAlmostEqual, + 1.0, 1.0, delta=0.5) + self.assertRaises(self.failureException, self.assertAlmostEqual, 1.1, 1.0, delta=0.05) self.assertRaises(self.failureException, self.assertNotAlmostEqual, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 12:07:07 2013 From: python-checkins at python.org (ezio.melotti) Date: Sun, 11 Aug 2013 12:07:07 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogIzE4NjYzOiBkb2N1?= =?utf-8?q?ment_that_assertAlmostEqual_also_works_when_the_values_are_equa?= =?utf-8?q?l?= Message-ID: <3cCbT74ypwz7LkQ@mail.python.org> http://hg.python.org/cpython/rev/9ddc63c039ba changeset: 85121:9ddc63c039ba branch: 2.7 parent: 85113:737b53ec5d1a user: Ezio Melotti date: Sun Aug 11 13:04:50 2013 +0300 summary: #18663: document that assertAlmostEqual also works when the values are equal and add tests. files: Doc/library/unittest.rst | 2 +- Lib/unittest/test/test_assertions.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletions(-) diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -1017,7 +1017,7 @@ like the :func:`round` function) and not *significant digits*. If *delta* is supplied instead of *places* then the difference - between *first* and *second* must be less (or more) than *delta*. + between *first* and *second* must be less or equal to (or greater than) *delta*. Supplying both *delta* and *places* raises a ``TypeError``. diff --git a/Lib/unittest/test/test_assertions.py b/Lib/unittest/test/test_assertions.py --- a/Lib/unittest/test/test_assertions.py +++ b/Lib/unittest/test/test_assertions.py @@ -33,6 +33,10 @@ self.assertNotAlmostEqual(1.1, 1.0, delta=0.05) self.assertNotAlmostEqual(1.0, 1.1, delta=0.05) + self.assertAlmostEqual(1.0, 1.0, delta=0.5) + self.assertRaises(self.failureException, self.assertNotAlmostEqual, + 1.0, 1.0, delta=0.5) + self.assertRaises(self.failureException, self.assertAlmostEqual, 1.1, 1.0, delta=0.05) self.assertRaises(self.failureException, self.assertNotAlmostEqual, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 19:14:25 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sun, 11 Aug 2013 19:14:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzA2?= =?utf-8?q?=3A_Fix_a_test_for_issue_=2318681_so_it_no_longer_breaks?= Message-ID: <3cCmy93j80zRMX@mail.python.org> http://hg.python.org/cpython/rev/dab790a17c4d changeset: 85122:dab790a17c4d branch: 3.3 parent: 85119:e0f86c3b3685 user: Serhiy Storchaka date: Sun Aug 11 20:12:20 2013 +0300 summary: Issue #18706: Fix a test for issue #18681 so it no longer breaks test_codeccallbacks*. 
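The fix shown below keeps a reference to the real module object and restores it in a cleanup, instead of deleting sys.modules entries and letting a later re-import create a different object. A generic sketch of that save-and-restore pattern (the module name is chosen arbitrarily; this is not the patched test itself):

    import html
    import sys
    import unittest

    class RestoresGlobalState(unittest.TestCase):
        def test_without_html(self):
            saved = sys.modules.pop('html')   # hide the package for this test only
            self.addCleanup(sys.modules.__setitem__, 'html', saved)   # put the same object back
            self.assertNotIn('html', sys.modules)

    if __name__ == "__main__":
        unittest.main()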
files: Lib/test/test_imp.py | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -278,8 +278,9 @@ def test_with_deleted_parent(self): # see #18681 from html import parser - del sys.modules['html'] - def cleanup(): del sys.modules['html.parser'] + html = sys.modules.pop('html') + def cleanup(): + sys.modules['html'] = html self.addCleanup(cleanup) with self.assertRaisesRegex(ImportError, 'html'): imp.reload(parser) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 19:14:26 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sun, 11 Aug 2013 19:14:26 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318706=3A_Fix_a_test_for_issue_=2318681_so_it_no?= =?utf-8?q?_longer_breaks?= Message-ID: <3cCmyB6jkqzRMk@mail.python.org> http://hg.python.org/cpython/rev/1f4aed2c914c changeset: 85123:1f4aed2c914c parent: 85120:eeda59e08c83 parent: 85122:dab790a17c4d user: Serhiy Storchaka date: Sun Aug 11 20:13:36 2013 +0300 summary: Issue #18706: Fix a test for issue #18681 so it no longer breaks test_codeccallbacks*. files: Lib/test/test_imp.py | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -317,8 +317,9 @@ def test_with_deleted_parent(self): # see #18681 from html import parser - del sys.modules['html'] - def cleanup(): del sys.modules['html.parser'] + html = sys.modules.pop('html') + def cleanup(): + sys.modules['html'] = html self.addCleanup(cleanup) with self.assertRaisesRegex(ImportError, 'html'): imp.reload(parser) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 11 22:08:48 2013 From: python-checkins at python.org (victor.stinner) Date: Sun, 11 Aug 2013 22:08:48 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_fix_typo?= Message-ID: <3cCrqN3cxjzMcm@mail.python.org> http://hg.python.org/peps/rev/bad213d6e836 changeset: 5050:bad213d6e836 user: Victor Stinner date: Sun Aug 11 22:07:33 2013 +0200 summary: PEP 446: fix typo Mention also Mac OS 10.8 for the O_CLOEXEC flag (it was already mentionned in the summary table). files: pep-0446.txt | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -19,7 +19,7 @@ not possible in some cases, and has poor performances on some platforms. This PEP proposes to make all file descriptors created by Python -non-inheritable by default to reduces the risk of these issues. This PEP +non-inheritable by default to reduce the risk of these issues. This PEP fixes also a race condition in multithreaded applications on operating systems supporting atomic flags to create non-inheritable file descriptors. @@ -247,8 +247,8 @@ On UNIX, new flags were added for files and sockets: * ``O_CLOEXEC``: available on Linux (2.6.23), FreeBSD (8.3), - OpenBSD 5.0, Solaris 11, QNX, BeOS, next NetBSD release (6.1?). - This flag is part of POSIX.1-2008. + Mac OS 10.8, OpenBSD 5.0, Solaris 11, QNX, BeOS, next NetBSD release + (6.1?). This flag is part of POSIX.1-2008. * ``SOCK_CLOEXEC`` flag for ``socket()`` and ``socketpair()``, available on Linux 2.6.27, OpenBSD 5.2, NetBSD 6.0. 
* ``fcntl()``: ``F_DUPFD_CLOEXEC`` flag, available on Linux 2.6.24, -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sun Aug 11 22:08:49 2013 From: python-checkins at python.org (victor.stinner) Date: Sun, 11 Aug 2013 22:08:49 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_clarify_the_status?= =?utf-8?q?_of_python_3=2E3_on_Windows?= Message-ID: <3cCrqP5KS8zRVX@mail.python.org> http://hg.python.org/peps/rev/f7ff2b1e3a74 changeset: 5051:f7ff2b1e3a74 user: Victor Stinner date: Sun Aug 11 22:08:03 2013 +0200 summary: PEP 446: clarify the status of python 3.3 on Windows Be more explicit files: pep-0446.txt | 34 +++++++++++++++++++--------------- 1 files changed, 19 insertions(+), 15 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -318,29 +318,31 @@ On Windows, the ``subprocess`` closes all handles and file descriptors in the child process by default. If at least one standard stream (stdin, stdout or stderr) is replaced (ex: redirected into a pipe), all -inheritable handles are inherited in the child process. +inheritable handles and file descriptors 0, 1 and 2 are inherited in the +child process. -All inheritable file descriptors are inherited by the child process -using the functions of the ``os.execv*()`` and ``os.spawn*()`` families. +Using the functions of the ``os.execv*()`` and ``os.spawn*()`` families, +all inheritable handles and all inheritable file descriptors are +inherited by the child process. On UNIX, the ``multiprocessing`` module uses ``os.fork()`` and so all file descriptors are inherited by child processes. -On Windows, all inheritable handles are inherited by the child process -using the ``multiprocessing`` module, all file descriptors except -standard streams are closed. +On Windows, all inheritable handles and file descriptors 0, 1 and 2 are +inherited by the child process using the ``multiprocessing`` module, all +file descriptors except standard streams are closed. Summary: -=========================== ============= ================== ============= -Module FD on UNIX Handles on Windows FD on Windows -=========================== ============= ================== ============= -subprocess, default STD, pass_fds none STD -subprocess, replace stdout STD, pass_fds all STD -subprocess, close_fds=False all all STD -multiprocessing all all STD -os.execv(), os.spawn() all all all -=========================== ============= ================== ============= +=========================== ================ ================== ============= +Module FD on UNIX Handles on Windows FD on Windows +=========================== ================ ================== ============= +subprocess, default STD, pass_fds none STD +subprocess, replace stdout STD, pass_fds all STD +subprocess, close_fds=False all all STD +multiprocessing (not applicable) all STD +os.execv(), os.spawn() all all all +=========================== ================ ================== ============= Legend: @@ -351,6 +353,8 @@ inherited in the child process * "pass_fds": file descriptors of the *pass_fds* parameter of the subprocess are inherited +* "(not applicable)": on UNIX, the multiprocessing uses ``fork()``, + so this case is not concerned by this PEP. 
Performances of Closing All File Descriptors -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sun Aug 11 22:08:50 2013 From: python-checkins at python.org (victor.stinner) Date: Sun, 11 Aug 2013 22:08:50 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_closing_all_file_d?= =?utf-8?q?escriptors_between_fork=28=29_and_exec=28=29_is_not_reliable?= Message-ID: <3cCrqQ6v3RzRcl@mail.python.org> http://hg.python.org/peps/rev/36961c29aa1b changeset: 5052:36961c29aa1b user: Victor Stinner date: Sun Aug 11 22:08:38 2013 +0200 summary: PEP 446: closing all file descriptors between fork() and exec() is not reliable in a multithreaded application files: pep-0446.txt | 16 +++++++++++++--- 1 files changed, 13 insertions(+), 3 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -357,14 +357,20 @@ so this case is not concerned by this PEP. -Performances of Closing All File Descriptors --------------------------------------------- +Closing All Open File Descriptors +--------------------------------- On UNIX, the ``subprocess`` module closes almost all file descriptors in the child process. This operation require MAXFD system calls, where MAXFD is the maximum number of file descriptors, even if there are only few open file descriptors. This maximum can be read using: -``sysconf("SC_OPEN_MAX")``. +``os.sysconf("SC_OPEN_MAX")``. + +There is no portable nor reliable function to close all open file +descriptors between ``fork()`` and ``execv()``. Another thread may +create an inheritable file descriptors while we are closing existing +file descriptors. Holding the CPython GIL reduces the risk of the race +condition. The operation can be slow if MAXFD is large. For example, on a FreeBSD buildbot with ``MAXFD=655,000``, the operation took 300 ms: see @@ -375,6 +381,10 @@ ``/proc//fd/``, and so performances depends on the number of open file descriptors, not on MAXFD. +FreeBSD, OpenBSD and Solaris provide a ``closefrom()`` function. It +cannot be used by the ``subprocess`` module when the *pass_fds* +parameter is a non-empty list of file descriptors. + See also: * `Python issue #1663329 `_: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sun Aug 11 22:57:49 2013 From: python-checkins at python.org (victor.stinner) Date: Sun, 11 Aug 2013 22:57:49 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_change_os=2Edup2?= =?utf-8?b?KCk=?= Message-ID: <3cCsvx09gBzRVD@mail.python.org> http://hg.python.org/peps/rev/29fa2a8b4736 changeset: 5053:29fa2a8b4736 user: Victor Stinner date: Sun Aug 11 22:57:36 2013 +0200 summary: PEP 446: change os.dup2() files: pep-0446.txt | 3 +-- 1 files changed, 1 insertions(+), 2 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -450,8 +450,7 @@ flag of file descriptors is unchanged in the parent process. * ``os.dup2(fd, fd2)`` makes *fd2* inheritable if *fd2* is ``0`` - (stdin), ``1`` (stdout) or ``2`` (stderr) and *fd2* is different than - *fd*. + (stdin), ``1`` (stdout) or ``2`` (stderr). Since Python should only create non-inheritable file descriptors, it is safe to use subprocess with the *close_fds* parameter set to ``False``. 
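The PEP text above describes behaviour that eventually shipped in Python 3.4: file descriptors created by Python are non-inheritable by default, and the flag is exposed through os.get_inheritable() and os.set_inheritable(). A minimal sketch, assuming Python 3.4 or later (the file name is arbitrary; these functions do not exist on 3.3 and earlier):

    import os

    fd = os.open("example.txt", os.O_WRONLY | os.O_CREAT)
    print(os.get_inheritable(fd))       # False: created non-inheritable
    os.set_inheritable(fd, True)        # opt in when a child process must inherit it
    print(os.get_inheritable(fd))       # True
    os.close(fd)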
-- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Aug 12 00:44:12 2013 From: python-checkins at python.org (eli.bendersky) Date: Mon, 12 Aug 2013 00:44:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzEyNjQ1?= =?utf-8?q?=3A_Clarify_and_reformat_the_documentation_of_import=5Ffresh=5F?= =?utf-8?q?module?= Message-ID: <3cCwGh1xjMzSWv@mail.python.org> http://hg.python.org/cpython/rev/edaf44136d32 changeset: 85124:edaf44136d32 branch: 3.3 parent: 85122:dab790a17c4d user: Eli Bendersky date: Sun Aug 11 15:38:08 2013 -0700 summary: Issue #12645: Clarify and reformat the documentation of import_fresh_module files: Doc/library/test.rst | 14 ++++---- Lib/test/support/__init__.py | 36 ++++++++++++++++------- 2 files changed, 32 insertions(+), 18 deletions(-) diff --git a/Doc/library/test.rst b/Doc/library/test.rst --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -489,7 +489,7 @@ *fresh* is an iterable of additional module names that are also removed from the ``sys.modules`` cache before doing the import. - *blocked* is an iterable of module names that are replaced with :const:`0` + *blocked* is an iterable of module names that are replaced with ``None`` in the module cache during the import to ensure that attempts to import them raise :exc:`ImportError`. @@ -500,15 +500,15 @@ Module and package deprecation messages are suppressed during this import if *deprecated* is ``True``. - This function will raise :exc:`unittest.SkipTest` if the named module - cannot be imported. + This function will raise :exc:`ImportError` if the named module cannot be + imported. Example use:: - # Get copies of the warnings module for testing without - # affecting the version being used by the rest of the test suite - # One copy uses the C implementation, the other is forced to use - # the pure Python fallback implementation + # Get copies of the warnings module for testing without affecting the + # version being used by the rest of the test suite. One copy uses the + # C implementation, the other is forced to use the pure Python fallback + # implementation py_warnings = import_fresh_module('warnings', blocked=['_warnings']) c_warnings = import_fresh_module('warnings', fresh=['_warnings']) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -124,7 +124,8 @@ def _save_and_remove_module(name, orig_modules): """Helper function to save and remove a module from sys.modules - Raise ImportError if the module can't be imported.""" + Raise ImportError if the module can't be imported. + """ # try to import the module and raise an error if it can't be imported if name not in sys.modules: __import__(name) @@ -137,7 +138,8 @@ def _save_and_block_module(name, orig_modules): """Helper function to save and block a module in sys.modules - Return True if the module was in sys.modules, False otherwise.""" + Return True if the module was in sys.modules, False otherwise. + """ saved = True try: orig_modules[name] = sys.modules[name] @@ -159,18 +161,30 @@ def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): - """Imports and returns a module, deliberately bypassing the sys.modules cache - and importing a fresh copy of the module. Once the import is complete, - the sys.modules cache is restored to its original state. + """Import and return a module, deliberately bypassing sys.modules. - Modules named in fresh are also imported anew if needed by the import. 
- If one of these modules can't be imported, None is returned. + This function imports and returns a fresh copy of the named Python module + by removing the named module from sys.modules before doing the import. + Note that unlike reload, the original module is not affected by + this operation. - Importing of modules named in blocked is prevented while the fresh import - takes place. + *fresh* is an iterable of additional module names that are also removed + from the sys.modules cache before doing the import. - If deprecated is True, any module or package deprecation messages - will be suppressed.""" + *blocked* is an iterable of module names that are replaced with None + in the module cache during the import to ensure that attempts to import + them raise ImportError. + + The named module and any modules named in the *fresh* and *blocked* + parameters are saved before starting the import and then reinserted into + sys.modules when the fresh import is complete. + + Module and package deprecation messages are suppressed during this import + if *deprecated* is True. + + This function will raise ImportError if the named module cannot be + imported. + """ # NOTE: test_heapq, test_json and test_warnings include extra sanity checks # to make sure that this utility function is working as expected with _ignore_deprecated_imports(deprecated): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 00:44:13 2013 From: python-checkins at python.org (eli.bendersky) Date: Mon, 12 Aug 2013 00:44:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Close_=2312645=3A_Clarify_and_reformat_the_documentation?= =?utf-8?q?_of_import=5Ffresh=5Fmodule?= Message-ID: <3cCwGj4xK4zSX1@mail.python.org> http://hg.python.org/cpython/rev/d8000009ef0e changeset: 85125:d8000009ef0e parent: 85123:1f4aed2c914c parent: 85124:edaf44136d32 user: Eli Bendersky date: Sun Aug 11 15:43:30 2013 -0700 summary: Close #12645: Clarify and reformat the documentation of import_fresh_module files: Doc/library/test.rst | 14 ++++---- Lib/test/support/__init__.py | 37 ++++++++++++++++------- 2 files changed, 32 insertions(+), 19 deletions(-) diff --git a/Doc/library/test.rst b/Doc/library/test.rst --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -489,7 +489,7 @@ *fresh* is an iterable of additional module names that are also removed from the ``sys.modules`` cache before doing the import. - *blocked* is an iterable of module names that are replaced with :const:`0` + *blocked* is an iterable of module names that are replaced with ``None`` in the module cache during the import to ensure that attempts to import them raise :exc:`ImportError`. @@ -500,15 +500,15 @@ Module and package deprecation messages are suppressed during this import if *deprecated* is ``True``. - This function will raise :exc:`unittest.SkipTest` if the named module - cannot be imported. + This function will raise :exc:`ImportError` if the named module cannot be + imported. Example use:: - # Get copies of the warnings module for testing without - # affecting the version being used by the rest of the test suite - # One copy uses the C implementation, the other is forced to use - # the pure Python fallback implementation + # Get copies of the warnings module for testing without affecting the + # version being used by the rest of the test suite. 
One copy uses the + # C implementation, the other is forced to use the pure Python fallback + # implementation py_warnings = import_fresh_module('warnings', blocked=['_warnings']) c_warnings = import_fresh_module('warnings', fresh=['_warnings']) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -130,8 +130,8 @@ def _save_and_remove_module(name, orig_modules): """Helper function to save and remove a module from sys.modules - Raise ImportError if the module can't be imported. - """ + Raise ImportError if the module can't be imported. + """ # try to import the module and raise an error if it can't be imported if name not in sys.modules: __import__(name) @@ -144,7 +144,8 @@ def _save_and_block_module(name, orig_modules): """Helper function to save and block a module in sys.modules - Return True if the module was in sys.modules, False otherwise.""" + Return True if the module was in sys.modules, False otherwise. + """ saved = True try: orig_modules[name] = sys.modules[name] @@ -166,18 +167,30 @@ def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): - """Imports and returns a module, deliberately bypassing the sys.modules cache - and importing a fresh copy of the module. Once the import is complete, - the sys.modules cache is restored to its original state. + """Import and return a module, deliberately bypassing sys.modules. - Modules named in fresh are also imported anew if needed by the import. - If one of these modules can't be imported, None is returned. + This function imports and returns a fresh copy of the named Python module + by removing the named module from sys.modules before doing the import. + Note that unlike reload, the original module is not affected by + this operation. - Importing of modules named in blocked is prevented while the fresh import - takes place. + *fresh* is an iterable of additional module names that are also removed + from the sys.modules cache before doing the import. - If deprecated is True, any module or package deprecation messages - will be suppressed.""" + *blocked* is an iterable of module names that are replaced with None + in the module cache during the import to ensure that attempts to import + them raise ImportError. + + The named module and any modules named in the *fresh* and *blocked* + parameters are saved before starting the import and then reinserted into + sys.modules when the fresh import is complete. + + Module and package deprecation messages are suppressed during this import + if *deprecated* is True. + + This function will raise ImportError if the named module cannot be + imported. + """ # NOTE: test_heapq, test_json and test_warnings include extra sanity checks # to make sure that this utility function is working as expected with _ignore_deprecated_imports(deprecated): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 01:49:23 2013 From: python-checkins at python.org (eli.bendersky) Date: Mon, 12 Aug 2013 01:49:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Add_some_docstrings=2C_cla?= =?utf-8?q?rify_others=2C_and_fix_formatting=2E?= Message-ID: <3cCxjv4G7wzSYy@mail.python.org> http://hg.python.org/cpython/rev/9e61563edb67 changeset: 85126:9e61563edb67 user: Eli Bendersky date: Sun Aug 11 16:48:44 2013 -0700 summary: Add some docstrings, clarify others, and fix formatting. 
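For context, the helpers documented in the diff below are the test suite's standard way to run a child interpreter and assert on its exit status. Typical use, assuming the layout shown in the diff (the module lives at Lib/test/script_helper.py), looks roughly like:

    from test.script_helper import assert_python_ok, assert_python_failure

    # Run a child interpreter and require a zero exit status.
    rc, out, err = assert_python_ok('-c', 'print("hello")')
    assert rc == 0 and out.strip() == b'hello'

    # The failure variant requires a non-zero exit status instead.
    rc, out, err = assert_python_failure('-c', 'import sys; sys.exit(2)')
    assert rc == 2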
files: Lib/test/script_helper.py | 14 +++++++++++--- 1 files changed, 11 insertions(+), 3 deletions(-) diff --git a/Lib/test/script_helper.py b/Lib/test/script_helper.py --- a/Lib/test/script_helper.py +++ b/Lib/test/script_helper.py @@ -39,7 +39,7 @@ p.stdout.close() p.stderr.close() rc = p.returncode - err = strip_python_stderr(err) + err = strip_python_stderr(err) if (rc and expected_success) or (not rc and not expected_success): raise AssertionError( "Process return code is %d, " @@ -49,18 +49,25 @@ def assert_python_ok(*args, **env_vars): """ Assert that running the interpreter with `args` and optional environment - variables `env_vars` is ok and return a (return code, stdout, stderr) tuple. + variables `env_vars` succeeds (rc == 0) and return a (return code, stdout, + stderr) tuple. """ return _assert_python(True, *args, **env_vars) def assert_python_failure(*args, **env_vars): """ Assert that running the interpreter with `args` and optional environment - variables `env_vars` fails and return a (return code, stdout, stderr) tuple. + variables `env_vars` fails (rc != 0) and return a (return code, stdout, + stderr) tuple. """ return _assert_python(False, *args, **env_vars) def spawn_python(*args, **kw): + """Run a Python subprocess with the given arguments. + + kw is extra keyword args to pass to subprocess.Popen. Returns a Popen + object. + """ cmd_line = [sys.executable, '-E'] cmd_line.extend(args) return subprocess.Popen(cmd_line, stdin=subprocess.PIPE, @@ -68,6 +75,7 @@ **kw) def kill_python(p): + """Run the given Popen process until completion and return stdout.""" p.stdin.close() data = p.stdout.read() p.stdout.close() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 03:31:43 2013 From: python-checkins at python.org (victor.stinner) Date: Mon, 12 Aug 2013 03:31:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_fix_reST_syntax?= Message-ID: <3cCzzz5MMVzRCZ@mail.python.org> http://hg.python.org/peps/rev/021e2cef281e changeset: 5054:021e2cef281e user: Victor Stinner date: Mon Aug 12 03:31:34 2013 +0200 summary: PEP 446: fix reST syntax files: pep-0446.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -335,7 +335,7 @@ Summary: =========================== ================ ================== ============= -Module FD on UNIX Handles on Windows FD on Windows +Module FD on UNIX Handles on Windows FD on Windows =========================== ================ ================== ============= subprocess, default STD, pass_fds none STD subprocess, replace stdout STD, pass_fds all STD -- Repository URL: http://hg.python.org/peps From solipsis at pitrou.net Mon Aug 12 05:49:11 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 12 Aug 2013 05:49:11 +0200 Subject: [Python-checkins] Daily reference leaks (9e61563edb67): sum=0 Message-ID: results for 9e61563edb67 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogC0Nx5r', '-x'] From python-checkins at python.org Mon Aug 12 15:51:49 2013 From: python-checkins at python.org (ethan.furman) Date: Mon, 12 Aug 2013 15:51:49 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_18693=3A_Put_custom_?= =?utf-8?q?=5F=5Fdir=5F=5F_back_in_place=2E__Will_instead_look_at_fixing?= Message-ID: <3cDJPx3Yylz7Lkn@mail.python.org> 
http://hg.python.org/cpython/rev/39697dcd97e3 changeset: 85127:39697dcd97e3 user: Ethan Furman date: Mon Aug 12 06:51:41 2013 -0700 summary: Issue 18693: Put custom __dir__ back in place. Will instead look at fixing `help()`. files: Lib/enum.py | 6 ++++++ Lib/test/test_enum.py | 15 +++++++++++++++ 2 files changed, 21 insertions(+), 0 deletions(-) diff --git a/Lib/enum.py b/Lib/enum.py --- a/Lib/enum.py +++ b/Lib/enum.py @@ -223,6 +223,9 @@ def __contains__(cls, member): return isinstance(member, cls) and member.name in cls._member_map_ + def __dir__(self): + return ['__class__', '__doc__', '__members__'] + self._member_names_ + @property def __members__(cls): """Returns a mapping of member name->value. @@ -430,6 +433,9 @@ def __str__(self): return "%s.%s" % (self.__class__.__name__, self._name_) + def __dir__(self): + return (['__class__', '__doc__', 'name', 'value']) + def __eq__(self, other): if type(other) is self.__class__: return self is other diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -67,6 +67,21 @@ WINTER = 4 self.Season = Season + def test_dir_on_class(self): + Season = self.Season + self.assertEqual( + set(dir(Season)), + set(['__class__', '__doc__', '__members__', + 'SPRING', 'SUMMER', 'AUTUMN', 'WINTER']), + ) + + def test_dir_on_item(self): + Season = self.Season + self.assertEqual( + set(dir(Season.WINTER)), + set(['__class__', '__doc__', 'name', 'value']), + ) + def test_enum_in_enum_out(self): Season = self.Season self.assertIs(Season(Season.WINTER), Season.WINTER) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 19:29:21 2013 From: python-checkins at python.org (brett.cannon) Date: Mon, 12 Aug 2013 19:29:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Closes_issue_=2318598=3A_H?= =?utf-8?q?ave_the_exception_message_for?= Message-ID: <3cDPDx68KyzNWm@mail.python.org> http://hg.python.org/cpython/rev/2294594fbe6c changeset: 85128:2294594fbe6c user: Brett Cannon date: Mon Aug 12 13:29:11 2013 -0400 summary: Closes issue #18598: Have the exception message for importlib.import_module() include the name of the module when the 'package' argument is missing but needed. files: Lib/importlib/__init__.py | 4 +++- Misc/NEWS | 3 +++ 2 files changed, 6 insertions(+), 1 deletions(-) diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py --- a/Lib/importlib/__init__.py +++ b/Lib/importlib/__init__.py @@ -85,7 +85,9 @@ level = 0 if name.startswith('.'): if not package: - raise TypeError("relative imports require the 'package' argument") + msg = ("the 'package' argument is required to perform a relative " + "import for {!r}") + raise TypeError(msg.format(name)) for character in name: if character != '.': break diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,6 +24,9 @@ Library ------- +- Issue #18598: Tweak exception message for importlib.import_module() to + include the module name when a key argument is missing. + - Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get docstrings and ValueError messages. 
Patch by Zhongyue Luo -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 19:50:03 2013 From: python-checkins at python.org (larry.hastings) Date: Mon, 12 Aug 2013 19:50:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NjY3?= =?utf-8?q?=3A_Add_missing_=22HAVE=5FFCHOWNAT=22_symbol_to_posix=2E=5Fhave?= =?utf-8?q?=5Ffunctions=2E?= Message-ID: <3cDPhq6n64zSVJ@mail.python.org> http://hg.python.org/cpython/rev/a89226508a04 changeset: 85129:a89226508a04 branch: 3.3 parent: 85124:edaf44136d32 user: Larry Hastings date: Mon Aug 12 13:49:30 2013 -0400 summary: Issue #18667: Add missing "HAVE_FCHOWNAT" symbol to posix._have_functions. files: Misc/NEWS | 2 ++ Modules/posixmodule.c | 4 ++++ 2 files changed, 6 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -12,6 +12,8 @@ Core and Builtins ----------------- +- Issue #18667: Add missing "HAVE_FCHOWNAT" symbol to posix._have_functions. + - Issue #18368: PyOS_StdioReadline() no longer leaks memory when realloc() fails. diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -11965,6 +11965,10 @@ "HAVE_FCHOWN", #endif +#ifdef HAVE_FCHOWNAT + "HAVE_FCHOWNAT", +#endif + #ifdef HAVE_FEXECVE "HAVE_FEXECVE", #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 19:53:37 2013 From: python-checkins at python.org (larry.hastings) Date: Mon, 12 Aug 2013 19:53:37 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318667=3A_Add_missing_=22HAVE=5FFCHOWNAT=22_symb?= =?utf-8?q?ol_to_posix=2E=5Fhave=5Ffunctions=2E?= Message-ID: <3cDPmx5hpSzSVJ@mail.python.org> http://hg.python.org/cpython/rev/92de1a5dc3ea changeset: 85130:92de1a5dc3ea parent: 85128:2294594fbe6c parent: 85129:a89226508a04 user: Larry Hastings date: Mon Aug 12 13:53:20 2013 -0400 summary: Issue #18667: Add missing "HAVE_FCHOWNAT" symbol to posix._have_functions. files: Misc/NEWS | 2 ++ Modules/posixmodule.c | 4 ++++ 2 files changed, 6 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #18667: Add missing "HAVE_FCHOWNAT" symbol to posix._have_functions. + - Issue #16499: Add command line option for isolated mode. - Issue #15301: Parsing fd, uid, and gid parameters for builtins diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -11567,6 +11567,10 @@ "HAVE_FCHOWN", #endif +#ifdef HAVE_FCHOWNAT + "HAVE_FCHOWNAT", +#endif + #ifdef HAVE_FEXECVE "HAVE_FEXECVE", #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 20:48:24 2013 From: python-checkins at python.org (antoine.pitrou) Date: Mon, 12 Aug 2013 20:48:24 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Add_whatsnew_entries_for_3?= =?utf-8?b?LjQu?= Message-ID: <3cDR084rQbzShd@mail.python.org> http://hg.python.org/cpython/rev/ee20be0dc539 changeset: 85131:ee20be0dc539 user: Antoine Pitrou date: Mon Aug 12 20:46:47 2013 +0200 summary: Add whatsnew entries for 3.4. 
files: Doc/whatsnew/3.4.rst | 113 ++++++++++++++++++++++++++++-- 1 files changed, 103 insertions(+), 10 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -94,14 +94,21 @@ New built-in features: -* None yet. +* :ref:`PEP 442: Safe object finalization `. +* :ref:`PEP 445: Configurable memory allocators `. Implementation improvements: -* A more efficient :mod:`marshal` format . +* A more efficient :mod:`marshal` format (:issue:`16475`). +* Improve finalization of Python modules to avoid setting their globals + to None, in most cases (:issue:`18214`). Significantly Improved Library Modules: +* Single-dispatch generic functions (:pep:`443`) +* SHA-3 (Keccak) support for :mod:`hashlib`. +* TLSv1.1 and TLSv1.2 support for :mod:`ssl`. + * SHA-3 (Keccak) support for :mod:`hashlib`. * TLSv1.1 and TLSv1.2 support for :mod:`ssl`. @@ -112,6 +119,10 @@ Please read on for a comprehensive list of user-facing changes. +.. _pep-445: + +PEP 445: Add new APIs to customize Python memory allocators +=========================================================== PEP 445: Add new APIs to customize Python memory allocators =========================================================== @@ -147,6 +158,8 @@ to specify the value they return if the iterable they are evaluating has no elements. Contributed by Julian Berman in :issue:`18111`. +* Module objects are now :mod:`weakref`'able. + New Modules =========== @@ -222,6 +235,26 @@ New :func:`functools.singledispatch` decorator: see the :pep:`443`. +mmap +---- + +mmap objects can now be weakref'ed. + +(Contributed by Valerie Lambert in :issue:`4885`.) + + +poplib +------ + +New :meth:`~poplib.POP3.stls` method to switch a clear-text POP3 session into +an encrypted POP3 session. + +New :meth:`~poplib.POP3.capa` method to query the capabilities advertised by the +POP3 server. + +(Contributed by Lorenzo Catucci in :issue:`4473`.) + + inspect ------- @@ -241,18 +274,50 @@ ssl --- -TLSv1.1 and TLSv1.2 support (Contributed by Michele Orr? and Antoine Pitrou -in :issue:`16692`) +TLSv1.1 and TLSv1.2 support. -New diagnostic functions :func:`~ssl.get_default_verify_paths`, -:meth:`~ssl.SSLContext.cert_store_stats` and -:meth:`~ssl.SSLContext.get_ca_certs` +(Contributed by Michele Orr? and Antoine Pitrou in :issue:`16692`) -Add :func:`ssl.enum_cert_store` to retrieve certificates and CRL from Windows' -cert store. +* New diagnostic functions :func:`~ssl.get_default_verify_paths`, + :meth:`~ssl.SSLContext.cert_store_stats` and + :meth:`~ssl.SSLContext.get_ca_certs` + +* Add :func:`ssl.enum_cert_store` to retrieve certificates and CRL from Windows' + cert store. (Contributed by Christian Heimes in :issue:`18143`, :issue:`18147` and -:issue:`17134`) + :issue:`17134`.) + +Support for server-side SNI using the new +:meth:`ssl.SSLContext.set_servername_callback` method. + +(Contributed by Daniel Black in :issue:`8109`.) + + +struct +------ + +Streaming struct unpacking using :func:`struct.iter_unpack`. + +(Contributed by Antoine Pitrou in :issue:`17804`.) + + +urllib +------ + +Add support.for ``data:`` URLs in :mod:`urllib.request`. + +(Contributed by Mathias Panzenb?ck in :issue:`16423`.) + + +unittest +-------- + +Support for easy dynamically-generated subtests using the +:meth:`~unittest.TestCase.subTest` context manager. + +(Contributed by Antoine Pitrou in :issue:`16997`.) + wave ---- @@ -263,6 +328,7 @@ :meth:`wave.open` now supports the context manager protocol. 
(Contributed by Claudiu Popa in :issue:`17616`.) + stat ---- @@ -272,6 +338,25 @@ The module supports new file types: door, event port and whiteout. + +weakref +------- + +New :class:`~weakref.WeakMethod` class simulates weak references to bound +methods. + +(Contributed by Antoine Pitrou in :issue:`14631`.) + + +xml.etree +--------- + +Add an event-driven parser for non-blocking applications, +:class:`~xml.etree.ElementTree.IncrementalParser`. + +(Contributed by Antoine Pitrou in :issue:`17782`.) + + colorsys -------- @@ -280,6 +365,14 @@ results should be less than 1% and may better match results found elsewhere. +Other improvements +================== + +Tab-completion is now enabled by default in the interactive interpreter. + +(Contributed by Antoine Pitrou and ?ric Araujo in :issue:`5845`.) + + Optimizations ============= -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 20:48:25 2013 From: python-checkins at python.org (antoine.pitrou) Date: Mon, 12 Aug 2013 20:48:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Sort_whatsnew_entries_alph?= =?utf-8?q?abetically?= Message-ID: <3cDR096mS0zSm4@mail.python.org> http://hg.python.org/cpython/rev/b788d04f14d6 changeset: 85132:b788d04f14d6 user: Antoine Pitrou date: Mon Aug 12 20:48:15 2013 +0200 summary: Sort whatsnew entries alphabetically files: Doc/whatsnew/3.4.rst | 66 ++++++++++++++++--------------- 1 files changed, 34 insertions(+), 32 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -173,6 +173,20 @@ Improved Modules ================ +aifc +---- + +The :meth:`~aifc.getparams` method now returns a namedtuple rather than a +plain tuple. (Contributed by Claudiu Popa in :issue:`17818`.) + + +colorsys +-------- + +The number of digits in the coefficients for the RGB --- YIQ conversions have +been expanded so that they match the FCC NTSC versions. The change in +results should be less than 1% and may better match results found elsewhere. + dis --- @@ -189,6 +203,7 @@ (Contributed by Nick Coghlan, Ryan Kelly and Thomas Kluyver in :issue:`11816`) + doctest ------- @@ -202,12 +217,6 @@ FAIL_FAST`` (to parallel the similar option supported by the :mod:`unittest` CLI). (Contributed by R. David Murray in :issue:`11390`.) -aifc ----- - -The :meth:`~aifc.getparams` method now returns a namedtuple rather than a -plain tuple. (Contributed by Claudiu Popa in :issue:`17818`.) - email ----- @@ -235,6 +244,14 @@ New :func:`functools.singledispatch` decorator: see the :pep:`443`. +inspect +------- + +:func:`~inspect.unwrap` makes it easy to unravel wrapper function chains +created by :func:`functools.wraps` (and any other API that sets the +``__wrapped__`` attribute on a wrapper function). + + mmap ---- @@ -255,14 +272,6 @@ (Contributed by Lorenzo Catucci in :issue:`4473`.) -inspect -------- - -:func:`~inspect.unwrap` makes it easy to unravel wrapper function chains -created by :func:`functools.wraps` (and any other API that sets the -``__wrapped__`` attribute on a wrapper function). - - smtplib ------- @@ -271,6 +280,7 @@ try/except statement by code that only cares whether or not an error occurred. (:issue:`2118`). + ssl --- @@ -294,6 +304,16 @@ (Contributed by Daniel Black in :issue:`8109`.) +stat +---- + +The :mod:`stat` module is now backed by a C implementation in :mod:`_stat`. A C +implementation is required as most of the values aren't standardized and +platform-dependent. 
(Contributed by Christian Heimes in :issue:`11016`.) + +The module supports new file types: door, event port and whiteout. + + struct ------ @@ -329,16 +349,6 @@ by Claudiu Popa in :issue:`17616`.) -stat ----- - -The :mod:`stat` module is now backed by a C implementation in :mod:`_stat`. A C -implementation is required as most of the values aren't standardized and -platform-dependent. (Contributed by Christian Heimes in :issue:`11016`.) - -The module supports new file types: door, event port and whiteout. - - weakref ------- @@ -357,14 +367,6 @@ (Contributed by Antoine Pitrou in :issue:`17782`.) -colorsys --------- - -The number of digits in the coefficients for the RGB --- YIQ conversions have -been expanded so that they match the FCC NTSC versions. The change in -results should be less than 1% and may better match results found elsewhere. - - Other improvements ================== -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 21:51:23 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 21:51:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Improving_strf?= =?utf-8?q?time_documentation=2E?= Message-ID: <3cDSNq1MtkzSdF@mail.python.org> http://hg.python.org/cpython/rev/b3b1dcdc8cee changeset: 85133:b3b1dcdc8cee branch: 2.7 parent: 83306:26b7431ba40c user: David Wolever date: Sat Apr 13 19:12:58 2013 -0400 summary: Improving strftime documentation. Re-ordering the table so similar directives are grouped together, adding examples, and removing some redundancy in the description of the ``%f`` formatter. files: Doc/library/datetime.rst | 246 +++++++++++++------------- 1 files changed, 124 insertions(+), 122 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -551,8 +551,9 @@ .. method:: date.strftime(format) Return a string representing the date, controlled by an explicit format string. - Format codes referring to hours, minutes or seconds will see 0 values. See - section :ref:`strftime-strptime-behavior`. + Format codes referring to hours, minutes or seconds will see 0 values. For a + complete list of formatting directives, see section + :ref:`strftime-strptime-behavior`. .. method:: date.__format__(format) @@ -730,7 +731,8 @@ *format*. This is equivalent to ``datetime(*(time.strptime(date_string, format)[0:6]))``. :exc:`ValueError` is raised if the date_string and format can't be parsed by :func:`time.strptime` or if it returns a value which isn't a - time tuple. See section :ref:`strftime-strptime-behavior`. + time tuple. For a complete list of formatting directives, see section + :ref:`strftime-strptime-behavior`. .. versionadded:: 2.5 @@ -1050,7 +1052,8 @@ .. method:: datetime.strftime(format) Return a string representing the date and time, controlled by an explicit format - string. See section :ref:`strftime-strptime-behavior`. + string. For a complete list of formatting directives, see section + :ref:`strftime-strptime-behavior`. .. method:: datetime.__format__(format) @@ -1283,7 +1286,8 @@ .. method:: time.strftime(format) Return a string representing the time, controlled by an explicit format string. - See section :ref:`strftime-strptime-behavior`. + For a complete list of formatting directives, see section + :ref:`strftime-strptime-behavior`. .. method:: time.__format__(format) @@ -1597,27 +1601,6 @@ microseconds should not be used, as :class:`date` objects have no such values. 
If they're used anyway, ``0`` is substituted for them. -.. versionadded:: 2.6 - :class:`.time` and :class:`.datetime` objects support a ``%f`` format code - which expands to the number of microseconds in the object, zero-padded on - the left to six places. - -For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty -strings. - -For an aware object: - -``%z`` - :meth:`utcoffset` is transformed into a 5-character string of the form +HHMM or - -HHMM, where HH is a 2-digit string giving the number of UTC offset hours, and - MM is a 2-digit string giving the number of UTC offset minutes. For example, if - :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``, ``%z`` is - replaced with the string ``'-0330'``. - -``%Z`` - If :meth:`tzname` returns ``None``, ``%Z`` is replaced by an empty string. - Otherwise ``%Z`` is replaced by the returned value, which must be a string. - The full set of format codes supported varies across platforms, because Python calls the platform C library's :func:`strftime` function, and platform variations are common. @@ -1630,105 +1613,101 @@ The exact range of years for which :meth:`strftime` works also varies across platforms. Regardless of platform, years before 1900 cannot be used. -+-----------+--------------------------------+-------+ -| Directive | Meaning | Notes | -+===========+================================+=======+ -| ``%a`` | Locale's abbreviated weekday | | -| | name. | | -+-----------+--------------------------------+-------+ -| ``%A`` | Locale's full weekday name. | | -+-----------+--------------------------------+-------+ -| ``%b`` | Locale's abbreviated month | | -| | name. | | -+-----------+--------------------------------+-------+ -| ``%B`` | Locale's full month name. | | -+-----------+--------------------------------+-------+ -| ``%c`` | Locale's appropriate date and | | -| | time representation. | | -+-----------+--------------------------------+-------+ -| ``%d`` | Day of the month as a decimal | | -| | number [01,31]. | | -+-----------+--------------------------------+-------+ -| ``%f`` | Microsecond as a decimal | \(1) | -| | number [0,999999], zero-padded | | -| | on the left | | -+-----------+--------------------------------+-------+ -| ``%H`` | Hour (24-hour clock) as a | | -| | decimal number [00,23]. | | -+-----------+--------------------------------+-------+ -| ``%I`` | Hour (12-hour clock) as a | | -| | decimal number [01,12]. | | -+-----------+--------------------------------+-------+ -| ``%j`` | Day of the year as a decimal | | -| | number [001,366]. | | -+-----------+--------------------------------+-------+ -| ``%m`` | Month as a decimal number | | -| | [01,12]. | | -+-----------+--------------------------------+-------+ -| ``%M`` | Minute as a decimal number | | -| | [00,59]. | | -+-----------+--------------------------------+-------+ -| ``%p`` | Locale's equivalent of either | \(2) | -| | AM or PM. | | -+-----------+--------------------------------+-------+ -| ``%S`` | Second as a decimal number | \(3) | -| | [00,61]. | | -+-----------+--------------------------------+-------+ -| ``%U`` | Week number of the year | \(4) | -| | (Sunday as the first day of | | -| | the week) as a decimal number | | -| | [00,53]. All days in a new | | -| | year preceding the first | | -| | Sunday are considered to be in | | -| | week 0. | | -+-----------+--------------------------------+-------+ -| ``%w`` | Weekday as a decimal number | | -| | [0(Sunday),6]. 
| | -+-----------+--------------------------------+-------+ -| ``%W`` | Week number of the year | \(4) | -| | (Monday as the first day of | | -| | the week) as a decimal number | | -| | [00,53]. All days in a new | | -| | year preceding the first | | -| | Monday are considered to be in | | -| | week 0. | | -+-----------+--------------------------------+-------+ -| ``%x`` | Locale's appropriate date | | -| | representation. | | -+-----------+--------------------------------+-------+ -| ``%X`` | Locale's appropriate time | | -| | representation. | | -+-----------+--------------------------------+-------+ -| ``%y`` | Year without century as a | | -| | decimal number [00,99]. | | -+-----------+--------------------------------+-------+ -| ``%Y`` | Year with century as a decimal | | -| | number. | | -+-----------+--------------------------------+-------+ -| ``%z`` | UTC offset in the form +HHMM | \(5) | -| | or -HHMM (empty string if the | | -| | the object is naive). | | -+-----------+--------------------------------+-------+ -| ``%Z`` | Time zone name (empty string | | -| | if the object is naive). | | -+-----------+--------------------------------+-------+ -| ``%%`` | A literal ``'%'`` character. | | -+-----------+--------------------------------+-------+ ++-----------+--------------------------------+------------------------+-------+ +| Directive | Meaning | Example | Notes | ++===========+================================+========================+=======+ +| ``%a`` | Weekday as locale's | Sun, Mon, ..., Sat | | +| | abbreviated name. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%A`` | Weekday as locale's full name. | Sunday, Monday, ..., | | +| | | Saturday | | ++-----------+--------------------------------+------------------------+-------+ +| ``%w`` | Weekday as a decimal number, | 0, 1, ..., 6 | | +| | where 0 is Sunday and 6 is | | | +| | Saturday. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%d`` | Day of the month as a | 01, 02, ..., 31 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%j`` | Day of the year as a | 001, 002, ..., 366 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%b`` | Month as locale's abbreviated | Jan, Feb, ..., Dec | | +| | name. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%B`` | Month as locale's full name. | January, February, | | +| | | ..., December | | ++-----------+--------------------------------+------------------------+-------+ +| ``%m`` | Month as a zero-padded | 01, 02, ..., 12 | | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%y`` | Year without century as a | 00, 01, ..., 99 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%Y`` | Year with century as a decimal | 1970, 1988, 2001, 2013 | | +| | number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%H`` | Hour (24-hour clock) as a | 00, 01, ..., 23 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%I`` | Hour (12-hour clock) as a | 01, 02, ..., 12 | | +| | zero-padded decimal number. 
| | | ++-----------+--------------------------------+------------------------+-------+ +| ``%p`` | Locale's equivalent of either | AM, PM | \(1) | +| | AM or PM. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%M`` | Minute as a zero-padded | 00, 01, ..., 59 | | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%S`` | Second as a zero-padded | 00, 01, ..., 61 | \(2) | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%f`` | Microsecond as a decimal | 000000, 000001, ..., | \(3) | +| | number, zero-padded on the | 999999 | | +| | left. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%z`` | UTC offset in the form +HHMM | (empty), +0000, -0400, | \(4) | +| | or -HHMM (empty string if the | +1030 | | +| | the object is naive). | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%Z`` | Time zone name (empty string | (empty), UTC, EST, CST | | +| | if the object is naive). | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%U`` | Week number of the year | 00, 01, ..., 53 | \(5) | +| | (Sunday as the first day of | | | +| | the week) as a zero padded | | | +| | decimal number. All days in a | | | +| | new year preceding the first | | | +| | Sunday are considered to be in | | | +| | week 0. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%W`` | Week number of the year | 00, 01, ..., 53 | \(5) | +| | (Monday as the first day of | | | +| | the week) as a decimal number. | | | +| | All days in a new year | | | +| | preceding the first Monday | | | +| | are considered to be in | | | +| | week 0. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%c`` | Locale's appropriate date and | Mon Aug 1 16:00:00 | | +| | time representation. | 1988 | | ++-----------+--------------------------------+------------------------+-------+ +| ``%x`` | Locale's appropriate date | 08/16/88 | | +| | representation. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%X`` | Locale's appropriate time | 16:00:00 | | +| | representation. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%%`` | A literal ``'%'`` character. | % | | ++-----------+--------------------------------+------------------------+-------+ Notes: (1) - When used with the :meth:`strptime` method, the ``%f`` directive - accepts from one to six digits and zero pads on the right. ``%f`` is - an extension to the set of format characters in the C standard (but - implemented separately in datetime objects, and therefore always - available). - -(2) When used with the :meth:`strptime` method, the ``%p`` directive only affects the output hour field if the ``%I`` directive is used to parse the hour. -(3) +(2) The range really is ``0`` to ``61``; according to the Posix standard this accounts for leap seconds and the (very rare) double leap seconds. The :mod:`time` module may produce and does accept leap seconds since @@ -1736,13 +1715,36 @@ does not accept leap seconds in :meth:`strptime` input nor will it produce them in :func:`strftime` output. +(3) + ``%f`` is an extension to the set of format characters in the C standard + (but implemented separately in datetime objects, and therefore always + available). 
When used with the :meth:`strptime` method, the ``%f`` + directive accepts from one to six digits and zero pads on the right. + + .. versionadded:: 2.6 + (4) - When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used in - calculations when the day of the week and the year are specified. + For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty + strings. + + For an aware object: + + ``%z`` + :meth:`utcoffset` is transformed into a 5-character string of the form + +HHMM or -HHMM, where HH is a 2-digit string giving the number of UTC + offset hours, and MM is a 2-digit string giving the number of UTC offset + minutes. For example, if :meth:`utcoffset` returns + ``timedelta(hours=-3, minutes=-30)``, ``%z`` is replaced with the string + ``'-0330'``. + + ``%Z`` + If :meth:`tzname` returns ``None``, ``%Z`` is replaced by an empty + string. Otherwise ``%Z`` is replaced by the returned value, which must + be a string. (5) - For example, if :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``, - ``%z`` is replaced with the string ``'-0330'``. + When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used + in calculations when the day of the week and the year are specified. .. rubric:: Footnotes -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 21:51:24 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 21:51:24 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Adding_localiz?= =?utf-8?q?ed_examples_to_strftime_documentation=2E?= Message-ID: <3cDSNr4L9zzT0v@mail.python.org> http://hg.python.org/cpython/rev/ae18c5ae2c4d changeset: 85134:ae18c5ae2c4d branch: 2.7 user: David Wolever date: Sat Apr 13 20:50:24 2013 -0400 summary: Adding localized examples to strftime documentation. files: Doc/library/datetime.rst | 86 ++++++++++++++++++--------- 1 files changed, 58 insertions(+), 28 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1616,11 +1616,19 @@ +-----------+--------------------------------+------------------------+-------+ | Directive | Meaning | Example | Notes | +===========+================================+========================+=======+ -| ``%a`` | Weekday as locale's | Sun, Mon, ..., Sat | | -| | abbreviated name. | | | +| ``%a`` | Weekday as locale's || Sun, Mon, ..., Sat | \(1) | +| | abbreviated name. | (en_US); | | +| | || So, Mo, ..., Sa | | +| | | (de_DE); | | +| | || ?, ?, ..., ? | | +| | | (ja_JP) | | +-----------+--------------------------------+------------------------+-------+ -| ``%A`` | Weekday as locale's full name. | Sunday, Monday, ..., | | -| | | Saturday | | +| ``%A`` | Weekday as locale's full name. || Sunday, Monday, ..., | \(1) | +| | | Saturday (en_US); | | +| | || Sonntag, Montag, ..., | | +| | | Samstag (de_DE); | | +| | || ???, ???, ..., | | +| | | ??? (ja_JP) | | +-----------+--------------------------------+------------------------+-------+ | ``%w`` | Weekday as a decimal number, | 0, 1, ..., 6 | | | | where 0 is Sunday and 6 is | | | @@ -1629,14 +1637,18 @@ | ``%d`` | Day of the month as a | 01, 02, ..., 31 | | | | zero-padded decimal number. | | | +-----------+--------------------------------+------------------------+-------+ -| ``%j`` | Day of the year as a | 001, 002, ..., 366 | | -| | zero-padded decimal number. | | | +| ``%b`` | Month as locale's abbreviated || Jan, Feb, ..., Dec | \(1) | +| | name. 
| (en_US); | | +| | || Jan, Feb, ..., Dez | | +| | | (de_DE); | | +| | || 1, 2, ..., 12 (ja_JP) | | +-----------+--------------------------------+------------------------+-------+ -| ``%b`` | Month as locale's abbreviated | Jan, Feb, ..., Dec | | -| | name. | | | -+-----------+--------------------------------+------------------------+-------+ -| ``%B`` | Month as locale's full name. | January, February, | | -| | | ..., December | | +| ``%B`` | Month as locale's full name. || January, February, | \(1) | +| | | ..., December (en_US);| | +| | || Januar, Februar, ..., | | +| | | Dezember (de_DE); | | +| | || 1?, 2?, ..., 12? | | +| | | (ja_JP) | | +-----------+--------------------------------+------------------------+-------+ | ``%m`` | Month as a zero-padded | 01, 02, ..., 12 | | | | decimal number. | | | @@ -1653,27 +1665,31 @@ | ``%I`` | Hour (12-hour clock) as a | 01, 02, ..., 12 | | | | zero-padded decimal number. | | | +-----------+--------------------------------+------------------------+-------+ -| ``%p`` | Locale's equivalent of either | AM, PM | \(1) | -| | AM or PM. | | | +| ``%p`` | Locale's equivalent of either || AM, PM (en_US); | \(1), | +| | AM or PM. || am, pm (de_DE); | \(2) | +| | || AM, PM (ja_JP) | | +-----------+--------------------------------+------------------------+-------+ | ``%M`` | Minute as a zero-padded | 00, 01, ..., 59 | | | | decimal number. | | | +-----------+--------------------------------+------------------------+-------+ -| ``%S`` | Second as a zero-padded | 00, 01, ..., 61 | \(2) | +| ``%S`` | Second as a zero-padded | 00, 01, ..., 61 | \(3) | | | decimal number. | | | +-----------+--------------------------------+------------------------+-------+ -| ``%f`` | Microsecond as a decimal | 000000, 000001, ..., | \(3) | +| ``%f`` | Microsecond as a decimal | 000000, 000001, ..., | \(4) | | | number, zero-padded on the | 999999 | | | | left. | | | +-----------+--------------------------------+------------------------+-------+ -| ``%z`` | UTC offset in the form +HHMM | (empty), +0000, -0400, | \(4) | +| ``%z`` | UTC offset in the form +HHMM | (empty), +0000, -0400, | \(5) | | | or -HHMM (empty string if the | +1030 | | | | the object is naive). | | | +-----------+--------------------------------+------------------------+-------+ | ``%Z`` | Time zone name (empty string | (empty), UTC, EST, CST | | | | if the object is naive). | | | +-----------+--------------------------------+------------------------+-------+ -| ``%U`` | Week number of the year | 00, 01, ..., 53 | \(5) | +| ``%j`` | Day of the year as a | 001, 002, ..., 366 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%U`` | Week number of the year | 00, 01, ..., 53 | \(6) | | | (Sunday as the first day of | | | | | the week) as a zero padded | | | | | decimal number. All days in a | | | @@ -1681,7 +1697,7 @@ | | Sunday are considered to be in | | | | | week 0. | | | +-----------+--------------------------------+------------------------+-------+ -| ``%W`` | Week number of the year | 00, 01, ..., 53 | \(5) | +| ``%W`` | Week number of the year | 00, 01, ..., 53 | \(6) | | | (Monday as the first day of | | | | | the week) as a decimal number. | | | | | All days in a new year | | | @@ -1689,14 +1705,21 @@ | | are considered to be in | | | | | week 0. | | | +-----------+--------------------------------+------------------------+-------+ -| ``%c`` | Locale's appropriate date and | Mon Aug 1 16:00:00 | | -| | time representation. 
| 1988 | | +| ``%c`` | Locale's appropriate date and || Tue Aug 16 21:30:00 | \(1) | +| | time representation. | 1988 (en_US), | | +| | || Di 16 Aug 21:30:00 | | +| | | 1988 (de_DE), | | +| | || ? 8/16 21:30:00 | | +| | | 1988 (ja_JP) | | +-----------+--------------------------------+------------------------+-------+ -| ``%x`` | Locale's appropriate date | 08/16/88 | | -| | representation. | | | +| ``%x`` | Locale's appropriate date || 08/16/88 (None), | \(1) | +| | representation. || 08/16/1988 (en_US), | | +| | || 16.08.1988 (de_DE), | | +| | || 1988/08/16 (ja_JP) | | +-----------+--------------------------------+------------------------+-------+ -| ``%X`` | Locale's appropriate time | 16:00:00 | | -| | representation. | | | +| ``%X`` | Locale's appropriate time || 21:30:00 (en_US), | \(1) | +| | representation. || 21:30:00 (de_DE), | | +| | || 21?30?00? (ja_JP) | | +-----------+--------------------------------+------------------------+-------+ | ``%%`` | A literal ``'%'`` character. | % | | +-----------+--------------------------------+------------------------+-------+ @@ -1704,10 +1727,17 @@ Notes: (1) + Because the format depends on the current locale, care should be taken when + making assumptions about the output value. Field orderings will vary (for + example, "month/day/year" versus "day/month/year"), and the output may + contain UTF-8 encoded unicode characters (for example, the ``ja_JP`` locale + may include Japanese characters). + +(2) When used with the :meth:`strptime` method, the ``%p`` directive only affects the output hour field if the ``%I`` directive is used to parse the hour. -(2) +(3) The range really is ``0`` to ``61``; according to the Posix standard this accounts for leap seconds and the (very rare) double leap seconds. The :mod:`time` module may produce and does accept leap seconds since @@ -1715,7 +1745,7 @@ does not accept leap seconds in :meth:`strptime` input nor will it produce them in :func:`strftime` output. -(3) +(4) ``%f`` is an extension to the set of format characters in the C standard (but implemented separately in datetime objects, and therefore always available). When used with the :meth:`strptime` method, the ``%f`` @@ -1723,7 +1753,7 @@ .. versionadded:: 2.6 -(4) +(5) For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty strings. @@ -1742,7 +1772,7 @@ string. Otherwise ``%Z`` is replaced by the returned value, which must be a string. -(5) +(6) When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used in calculations when the day of the week and the year are specified. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 21:51:25 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 21:51:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Correct_mislea?= =?utf-8?q?ding_strftime_documentation=2E?= Message-ID: <3cDSNs6N4YzSrn@mail.python.org> http://hg.python.org/cpython/rev/53a0e908f787 changeset: 85135:53a0e908f787 branch: 2.7 user: David Wolever date: Sat Apr 13 22:40:11 2013 -0400 summary: Correct misleading strftime documentation. strftime using locale-aware formatting directives will often, but not awlays, produce UTF-8-encoded Unicode. 
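To make the locale dependence concrete, a small Python 2.7 sketch; the locale name ``de_DE.UTF-8`` is an assumption that may not be installed on every system, and the values in the comments are examples rather than guaranteed output::

    import datetime
    import locale

    # strftime()'s locale-aware directives follow the active LC_TIME locale,
    # and on Python 2 the result is a byte string in that locale's encoding.
    locale.setlocale(locale.LC_TIME, "de_DE.UTF-8")
    stamp = datetime.datetime(1988, 8, 16, 21, 30)

    print(stamp.strftime("%A %x"))            # e.g. "Dienstag 16.08.1988"
    print(locale.getlocale(locale.LC_TIME))   # e.g. ("de_DE", "UTF-8")
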
files: Doc/library/datetime.rst | 6 ++++-- 1 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1730,8 +1730,10 @@ Because the format depends on the current locale, care should be taken when making assumptions about the output value. Field orderings will vary (for example, "month/day/year" versus "day/month/year"), and the output may - contain UTF-8 encoded unicode characters (for example, the ``ja_JP`` locale - may include Japanese characters). + contain Unicode characters (encoded, by default, using UTF-8, but this may + vary based on the locale; for example, the ``ja_JP`` locale contains UTF-8 + encoded Japanese characters, but ``ja_JP.SJIS`` contains Shift JIS encoded + Japanese characters). (2) When used with the :meth:`strptime` method, the ``%p`` directive only affects -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 21:51:27 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 21:51:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Remove_Japanes?= =?utf-8?q?e_datetime_examples?= Message-ID: <3cDSNv27PhzSdF@mail.python.org> http://hg.python.org/cpython/rev/20a9ffdcfe5d changeset: 85136:20a9ffdcfe5d branch: 2.7 user: David Wolever date: Thu May 23 17:23:49 2013 -0400 summary: Remove Japanese datetime examples files: Doc/library/datetime.rst | 36 +++++++++------------------ 1 files changed, 12 insertions(+), 24 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1619,16 +1619,12 @@ | ``%a`` | Weekday as locale's || Sun, Mon, ..., Sat | \(1) | | | abbreviated name. | (en_US); | | | | || So, Mo, ..., Sa | | -| | | (de_DE); | | -| | || ?, ?, ..., ? | | -| | | (ja_JP) | | +| | | (de_DE) | | +-----------+--------------------------------+------------------------+-------+ | ``%A`` | Weekday as locale's full name. || Sunday, Monday, ..., | \(1) | | | | Saturday (en_US); | | | | || Sonntag, Montag, ..., | | -| | | Samstag (de_DE); | | -| | || ???, ???, ..., | | -| | | ??? (ja_JP) | | +| | | Samstag (de_DE) | | +-----------+--------------------------------+------------------------+-------+ | ``%w`` | Weekday as a decimal number, | 0, 1, ..., 6 | | | | where 0 is Sunday and 6 is | | | @@ -1640,15 +1636,12 @@ | ``%b`` | Month as locale's abbreviated || Jan, Feb, ..., Dec | \(1) | | | name. | (en_US); | | | | || Jan, Feb, ..., Dez | | -| | | (de_DE); | | -| | || 1, 2, ..., 12 (ja_JP) | | +| | | (de_DE) | | +-----------+--------------------------------+------------------------+-------+ | ``%B`` | Month as locale's full name. || January, February, | \(1) | | | | ..., December (en_US);| | | | || Januar, Februar, ..., | | -| | | Dezember (de_DE); | | -| | || 1?, 2?, ..., 12? | | -| | | (ja_JP) | | +| | | Dezember (de_DE) | | +-----------+--------------------------------+------------------------+-------+ | ``%m`` | Month as a zero-padded | 01, 02, ..., 12 | | | | decimal number. | | | @@ -1666,8 +1659,7 @@ | | zero-padded decimal number. | | | +-----------+--------------------------------+------------------------+-------+ | ``%p`` | Locale's equivalent of either || AM, PM (en_US); | \(1), | -| | AM or PM. || am, pm (de_DE); | \(2) | -| | || AM, PM (ja_JP) | | +| | AM or PM. 
|| am, pm (de_DE) | \(2) | +-----------+--------------------------------+------------------------+-------+ | ``%M`` | Minute as a zero-padded | 00, 01, ..., 59 | | | | decimal number. | | | @@ -1706,20 +1698,16 @@ | | week 0. | | | +-----------+--------------------------------+------------------------+-------+ | ``%c`` | Locale's appropriate date and || Tue Aug 16 21:30:00 | \(1) | -| | time representation. | 1988 (en_US), | | +| | time representation. | 1988 (en_US); | | | | || Di 16 Aug 21:30:00 | | -| | | 1988 (de_DE), | | -| | || ? 8/16 21:30:00 | | -| | | 1988 (ja_JP) | | +| | | 1988 (de_DE) | | +-----------+--------------------------------+------------------------+-------+ -| ``%x`` | Locale's appropriate date || 08/16/88 (None), | \(1) | -| | representation. || 08/16/1988 (en_US), | | -| | || 16.08.1988 (de_DE), | | -| | || 1988/08/16 (ja_JP) | | +| ``%x`` | Locale's appropriate date || 08/16/88 (None); | \(1) | +| | representation. || 08/16/1988 (en_US); | | +| | || 16.08.1988 (de_DE) | | +-----------+--------------------------------+------------------------+-------+ -| ``%X`` | Locale's appropriate time || 21:30:00 (en_US), | \(1) | -| | representation. || 21:30:00 (de_DE), | | -| | || 21?30?00? (ja_JP) | | +| ``%X`` | Locale's appropriate time || 21:30:00 (en_US); | \(1) | +| | representation. || 21:30:00 (de_DE) | | +-----------+--------------------------------+------------------------+-------+ | ``%%`` | A literal ``'%'`` character. | % | | +-----------+--------------------------------+------------------------+-------+ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 21:51:28 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 21:51:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Clarify_that_s?= =?utf-8?q?trftime=27s_encoding_is_based_on_locale?= Message-ID: <3cDSNw3vSTz7LjZ@mail.python.org> http://hg.python.org/cpython/rev/102b3e257dca changeset: 85137:102b3e257dca branch: 2.7 user: David Wolever date: Thu May 23 17:42:14 2013 -0400 summary: Clarify that strftime's encoding is based on locale files: Doc/library/datetime.rst | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1718,10 +1718,10 @@ Because the format depends on the current locale, care should be taken when making assumptions about the output value. Field orderings will vary (for example, "month/day/year" versus "day/month/year"), and the output may - contain Unicode characters (encoded, by default, using UTF-8, but this may - vary based on the locale; for example, the ``ja_JP`` locale contains UTF-8 - encoded Japanese characters, but ``ja_JP.SJIS`` contains Shift JIS encoded - Japanese characters). + contain Unicode characters encoded using the locale's default encoding (for + example, if the current locale is ``js_JP``, the default encoding could be + any one of ``eucJP``, ``SJIS``, or ``utf-8``; use :meth:`locale.getlocale` + to determine the current locale's encoding). 
(2) When used with the :meth:`strptime` method, the ``%p`` directive only affects -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 21:51:32 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 21:51:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMi43IC0+IDIuNyk6?= =?utf-8?q?_Clean_merge?= Message-ID: <3cDSP02w6Qz7LjM@mail.python.org> http://hg.python.org/cpython/rev/0f4d971b0cee changeset: 85138:0f4d971b0cee branch: 2.7 parent: 85137:102b3e257dca parent: 83899:ef037ad304c1 user: David Wolever date: Thu May 23 17:51:58 2013 -0400 summary: Clean merge files: .hgtags | 1 + Doc/c-api/exceptions.rst | 38 +- Doc/c-api/intro.rst | 4 +- Doc/faq/design.rst | 4 +- Doc/faq/programming.rst | 86 + Doc/glossary.rst | 8 + Doc/howto/advocacy.rst | 355 ------- Doc/howto/index.rst | 1 - Doc/howto/sockets.rst | 8 +- Doc/howto/urllib2.rst | 12 +- Doc/library/codecs.rst | 172 ++- Doc/library/collections.rst | 4 +- Doc/library/compileall.rst | 2 +- Doc/library/ctypes.rst | 2 +- Doc/library/io.rst | 3 + Doc/library/itertools.rst | 4 +- Doc/library/numbers.rst | 8 +- Doc/library/operator.rst | 47 +- Doc/library/resource.rst | 21 +- Doc/library/socket.rst | 16 +- Doc/library/ssl.rst | 16 +- Doc/library/stdtypes.rst | 28 +- Doc/library/string.rst | 5 +- Doc/library/unittest.rst | 2 + Doc/library/urllib.rst | 7 + Doc/library/urllib2.rst | 15 +- Doc/reference/datamodel.rst | 9 +- Doc/reference/expressions.rst | 15 +- Doc/reference/simple_stmts.rst | 3 + Doc/tutorial/inputoutput.rst | 23 +- Doc/tutorial/modules.rst | 7 +- Doc/using/mac.rst | 14 +- Include/object.h | 16 +- Include/patchlevel.h | 4 +- Lib/_weakrefset.py | 6 + Lib/collections.py | 2 - Lib/ctypes/test/__init__.py | 2 +- Lib/ctypes/test/test_wintypes.py | 43 + Lib/ctypes/util.py | 2 +- Lib/distutils/__init__.py | 2 +- Lib/filecmp.py | 2 +- Lib/gzip.py | 69 +- Lib/idlelib/Bindings.py | 4 + Lib/idlelib/EditorWindow.py | 31 +- Lib/idlelib/PyShell.py | 1 - Lib/idlelib/help.txt | 3 +- Lib/idlelib/idlever.py | 2 +- Lib/idlelib/run.py | 5 + Lib/logging/handlers.py | 36 +- Lib/mimetypes.py | 2 + Lib/multiprocessing/pool.py | 2 + Lib/multiprocessing/synchronize.py | 2 +- Lib/multiprocessing/util.py | 5 +- Lib/pickle.py | 2 +- Lib/plistlib.py | 4 +- Lib/pydoc_data/topics.py | 18 +- Lib/sre_parse.py | 6 +- Lib/ssl.py | 26 +- Lib/tarfile.py | 12 +- Lib/test/pickletester.py | 2 + Lib/test/test_base64.py | 26 + Lib/test/test_bz2.py | 31 +- Lib/test/test_collections.py | 2 +- Lib/test/test_dictviews.py | 5 + Lib/test/test_gdb.py | 46 +- Lib/test/test_gzip.py | 17 - Lib/test/test_io.py | 4 +- Lib/test/test_mimetypes.py | 2 + Lib/test/test_multiprocessing.py | 32 +- Lib/test/test_plistlib.py | 12 + Lib/test/test_pydoc.py | 57 +- Lib/test/test_re.py | 11 + Lib/test/test_sax.py | 20 + Lib/test/test_support.py | 9 + Lib/test/test_tarfile.py | 8 + Lib/test/test_tcl.py | 18 +- Lib/test/test_weakset.py | 6 + Lib/test/test_winreg.py | 12 +- Lib/test/test_zipfile.py | 10 +- Lib/test/testbz2_bigmem.bz2 | Bin Lib/threading.py | 42 +- Lib/xml/sax/saxutils.py | 8 +- Misc/ACKS | 9 + Misc/NEWS | 457 ++++++--- Misc/RPM/python-2.7.spec | 2 +- Modules/_ctypes/libffi/src/dlmalloc.c | 5 + Modules/_multiprocessing/multiprocessing.c | 2 +- Modules/_sqlite/cursor.c | 2 +- Modules/_sqlite/util.c | 8 +- Modules/_sqlite/util.h | 4 +- Modules/_testcapimodule.c | 2 +- Modules/cPickle.c | 10 +- Modules/dbmmodule.c | 8 +- Modules/operator.c | 14 +- Modules/readline.c | 27 +- Modules/selectmodule.c | 35 +- 
Modules/signalmodule.c | 14 +- Modules/sre.h | 4 +- Objects/dictobject.c | 4 + PCbuild/rt.bat | 4 +- README | 2 +- Tools/scripts/gprof2html.py | 2 +- configure | 2 +- configure.ac | 2 +- setup.py | 8 +- 105 files changed, 1301 insertions(+), 955 deletions(-) diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -158,3 +158,4 @@ 70274d53c1ddc60c5f9a2b8a422a49884021447c v2.7.3 a8d18780bc2bccf16bf580587e1e3c934a98f6a7 v2.7.4rc1 026ee0057e2d3305f90a9da41daf7c3f9eb1e814 v2.7.4 +ab05e7dd27889b93f20d97bae86170aabfe45ace v2.7.5 diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -192,12 +192,19 @@ when the system call returns an error. +.. c:function:: PyObject* PyErr_SetFromErrnoWithFilenameObject(PyObject *type, PyObject *filenameObject) + + Similar to :c:func:`PyErr_SetFromErrno`, with the additional behavior that if + *filenameObject* is not *NULL*, it is passed to the constructor of *type* as + a third parameter. In the case of exceptions such as :exc:`IOError` and + :exc:`OSError`, this is used to define the :attr:`filename` attribute of the + exception instance. + + .. c:function:: PyObject* PyErr_SetFromErrnoWithFilename(PyObject *type, const char *filename) - Similar to :c:func:`PyErr_SetFromErrno`, with the additional behavior that if - *filename* is not *NULL*, it is passed to the constructor of *type* as a third - parameter. In the case of exceptions such as :exc:`IOError` and :exc:`OSError`, - this is used to define the :attr:`filename` attribute of the exception instance. + Similar to :c:func:`PyErr_SetFromErrnoWithFilenameObject`, but the filename + is given as a C string. .. c:function:: PyObject* PyErr_SetFromWindowsErr(int ierr) @@ -220,14 +227,29 @@ .. versionadded:: 2.3 +.. c:function:: PyObject* PyErr_SetFromWindowsErrWithFilenameObject(int ierr, PyObject *filenameObject) + + Similar to :c:func:`PyErr_SetFromWindowsErr`, with the additional behavior that + if *filenameObject* is not *NULL*, it is passed to the constructor of + :exc:`WindowsError` as a third parameter. Availability: Windows. + + .. c:function:: PyObject* PyErr_SetFromWindowsErrWithFilename(int ierr, const char *filename) - Similar to :c:func:`PyErr_SetFromWindowsErr`, with the additional behavior that - if *filename* is not *NULL*, it is passed to the constructor of - :exc:`WindowsError` as a third parameter. Availability: Windows. + Similar to :c:func:`PyErr_SetFromWindowsErrWithFilenameObject`, but the + filename is given as a C string. Availability: Windows. -.. c:function:: PyObject* PyErr_SetExcFromWindowsErrWithFilename(PyObject *type, int ierr, char *filename) +.. c:function:: PyObject* PyErr_SetExcFromWindowsErrWithFilenameObject(PyObject *type, int ierr, PyObject *filename) + + Similar to :c:func:`PyErr_SetFromWindowsErrWithFilenameObject`, with an + additional parameter specifying the exception type to be raised. + Availability: Windows. + + .. versionadded:: 2.3 + + +.. c:function:: PyObject* PyErr_SetExcFromWindowsErrWithFilename(PyObject *type, int ierr, const char *filename) Similar to :c:func:`PyErr_SetFromWindowsErrWithFilename`, with an additional parameter specifying the exception type to be raised. Availability: Windows. 
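The error-setting helpers documented in the hunk above surface in Python code as the ``errno``, ``strerror`` and ``filename`` attributes of the raised exception. A minimal, purely illustrative sketch (the path is a deliberately bogus placeholder)::

    import errno

    try:
        open("/no/such/file.txt")
    except IOError as exc:            # OSError on Python 3
        # These attributes are filled in from errno and the filename
        # passed to the C helpers described above.
        print(exc.errno == errno.ENOENT)
        print(exc.strerror)
        print(exc.filename)
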
diff --git a/Doc/c-api/intro.rst b/Doc/c-api/intro.rst --- a/Doc/c-api/intro.rst +++ b/Doc/c-api/intro.rst @@ -255,8 +255,10 @@ PyObject *index = PyInt_FromLong(i); if (!index) return -1; - if (PyObject_SetItem(target, index, item) < 0) + if (PyObject_SetItem(target, index, item) < 0) { + Py_DECREF(index); return -1; + } Py_DECREF(index); } return 0; diff --git a/Doc/faq/design.rst b/Doc/faq/design.rst --- a/Doc/faq/design.rst +++ b/Doc/faq/design.rst @@ -910,8 +910,8 @@ When you have a literal value for a list, tuple, or dictionary spread across multiple lines, it's easier to add more elements because you don't have to -remember to add a comma to the previous line. The lines can also be sorted in -your editor without creating a syntax error. +remember to add a comma to the previous line. The lines can also be reordered +without creating a syntax error. Accidentally omitting the comma can lead to errors that are hard to diagnose. For example:: diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -1223,6 +1223,92 @@ return map(apply, methods, [arguments]*nobjects) +Why does a_tuple[i] += ['item'] raise an exception when the addition works? +--------------------------------------------------------------------------- + +This is because of a combination of the fact that augmented assignment +operators are *assignment* operators, and the difference between mutable and +immutable objects in Python. + +This discussion applies in general when augmented assignment operators are +applied to elements of a tuple that point to mutable objects, but we'll use +a ``list`` and ``+=`` as our exemplar. + +If you wrote:: + + >>> a_tuple = (1, 2) + >>> a_tuple[0] += 1 + Traceback (most recent call last): + ... + TypeError: 'tuple' object does not support item assignment + +The reason for the exception should be immediately clear: ``1`` is added to the +object ``a_tuple[0]`` points to (``1``), producing the result object, ``2``, +but when we attempt to assign the result of the computation, ``2``, to element +``0`` of the tuple, we get an error because we can't change what an element of +a tuple points to. + +Under the covers, what this augmented assignment statement is doing is +approximately this:: + + >>> result = a_tuple[0] + 1 + >>> a_tuple[0] = result + Traceback (most recent call last): + ... + TypeError: 'tuple' object does not support item assignment + +It is the assignment part of the operation that produces the error, since a +tuple is immutable. + +When you write something like:: + + >>> a_tuple = (['foo'], 'bar') + >>> a_tuple[0] += ['item'] + Traceback (most recent call last): + ... + TypeError: 'tuple' object does not support item assignment + +The exception is a bit more surprising, and even more surprising is the fact +that even though there was an error, the append worked:: + + >>> a_tuple[0] + ['foo', 'item'] + +To see why this happens, you need to know that (a) if an object implements an +``__iadd__`` magic method, it gets called when the ``+=`` augmented assignment +is executed, and its return value is what gets used in the assignment statement; +and (b) for lists, ``__iadd__`` is equivalent to calling ``extend`` on the list +and returning the list. 
That's why we say that for lists, ``+=`` is a +"shorthand" for ``list.extend``:: + + >>> a_list = [] + >>> a_list += [1] + >>> a_list + [1] + +This is equivalent to:: + + >>> result = a_list.__iadd__([1]) + >>> a_list = result + +The object pointed to by a_list has been mutated, and the pointer to the +mutated object is assigned back to ``a_list``. The end result of the +assignment is a no-op, since it is a pointer to the same object that ``a_list`` +was previously pointing to, but the assignment still happens. + +Thus, in our tuple example what is happening is equivalent to:: + + >>> result = a_tuple[0].__iadd__(['item']) + >>> a_tuple[0] = result + Traceback (most recent call last): + ... + TypeError: 'tuple' object does not support item assignment + +The ``__iadd__`` succeeds, and thus the list is extended, but even though +``result`` points to the same object that ``a_tuple[0]`` already points to, +that final assignment still results in an error, because tuples are immutable. + + Dictionaries ============ diff --git a/Doc/glossary.rst b/Doc/glossary.rst --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -77,6 +77,14 @@ Benevolent Dictator For Life, a.k.a. `Guido van Rossum `_, Python's creator. + bytes-like object + An object that supports the :ref:`buffer protocol `, + like :class:`str`, :class:`bytearray` or :class:`memoryview`. + Bytes-like objects can be used for various operations that expect + binary data, such as compression, saving to a binary file or sending + over a socket. Some operations need the binary data to be mutable, + in which case not all bytes-like objects can apply. + bytecode Python source code is compiled into bytecode, the internal representation of a Python program in the CPython interpreter. The bytecode is also diff --git a/Doc/howto/advocacy.rst b/Doc/howto/advocacy.rst deleted file mode 100644 --- a/Doc/howto/advocacy.rst +++ /dev/null @@ -1,355 +0,0 @@ -************************* - Python Advocacy HOWTO -************************* - -:Author: A.M. Kuchling -:Release: 0.03 - - -.. topic:: Abstract - - It's usually difficult to get your management to accept open source software, - and Python is no exception to this rule. This document discusses reasons to use - Python, strategies for winning acceptance, facts and arguments you can use, and - cases where you *shouldn't* try to use Python. - - -Reasons to Use Python -===================== - -There are several reasons to incorporate a scripting language into your -development process, and this section will discuss them, and why Python has some -properties that make it a particularly good choice. - - -Programmability ---------------- - -Programs are often organized in a modular fashion. Lower-level operations are -grouped together, and called by higher-level functions, which may in turn be -used as basic operations by still further upper levels. - -For example, the lowest level might define a very low-level set of functions for -accessing a hash table. The next level might use hash tables to store the -headers of a mail message, mapping a header name like ``Date`` to a value such -as ``Tue, 13 May 1997 20:00:54 -0400``. A yet higher level may operate on -message objects, without knowing or caring that message headers are stored in a -hash table, and so forth. - -Often, the lowest levels do very simple things; they implement a data structure -such as a binary tree or hash table, or they perform some simple computation, -such as converting a date string to a number. 
The higher levels then contain -logic connecting these primitive operations. Using the approach, the primitives -can be seen as basic building blocks which are then glued together to produce -the complete product. - -Why is this design approach relevant to Python? Because Python is well suited -to functioning as such a glue language. A common approach is to write a Python -module that implements the lower level operations; for the sake of speed, the -implementation might be in C, Java, or even Fortran. Once the primitives are -available to Python programs, the logic underlying higher level operations is -written in the form of Python code. The high-level logic is then more -understandable, and easier to modify. - -John Ousterhout wrote a paper that explains this idea at greater length, -entitled "Scripting: Higher Level Programming for the 21st Century". I -recommend that you read this paper; see the references for the URL. Ousterhout -is the inventor of the Tcl language, and therefore argues that Tcl should be -used for this purpose; he only briefly refers to other languages such as Python, -Perl, and Lisp/Scheme, but in reality, Ousterhout's argument applies to -scripting languages in general, since you could equally write extensions for any -of the languages mentioned above. - - -Prototyping ------------ - -In *The Mythical Man-Month*, Fredrick Brooks suggests the following rule when -planning software projects: "Plan to throw one away; you will anyway." Brooks -is saying that the first attempt at a software design often turns out to be -wrong; unless the problem is very simple or you're an extremely good designer, -you'll find that new requirements and features become apparent once development -has actually started. If these new requirements can't be cleanly incorporated -into the program's structure, you're presented with two unpleasant choices: -hammer the new features into the program somehow, or scrap everything and write -a new version of the program, taking the new features into account from the -beginning. - -Python provides you with a good environment for quickly developing an initial -prototype. That lets you get the overall program structure and logic right, and -you can fine-tune small details in the fast development cycle that Python -provides. Once you're satisfied with the GUI interface or program output, you -can translate the Python code into C++, Fortran, Java, or some other compiled -language. - -Prototyping means you have to be careful not to use too many Python features -that are hard to implement in your other language. Using ``eval()``, or regular -expressions, or the :mod:`pickle` module, means that you're going to need C or -Java libraries for formula evaluation, regular expressions, and serialization, -for example. But it's not hard to avoid such tricky code, and in the end the -translation usually isn't very difficult. The resulting code can be rapidly -debugged, because any serious logical errors will have been removed from the -prototype, leaving only more minor slip-ups in the translation to track down. - -This strategy builds on the earlier discussion of programmability. Using Python -as glue to connect lower-level components has obvious relevance for constructing -prototype systems. In this way Python can help you with development, even if -end users never come in contact with Python code at all. 
If the performance of -the Python version is adequate and corporate politics allow it, you may not need -to do a translation into C or Java, but it can still be faster to develop a -prototype and then translate it, instead of attempting to produce the final -version immediately. - -One example of this development strategy is Microsoft Merchant Server. Version -1.0 was written in pure Python, by a company that subsequently was purchased by -Microsoft. Version 2.0 began to translate the code into C++, shipping with some -C++code and some Python code. Version 3.0 didn't contain any Python at all; all -the code had been translated into C++. Even though the product doesn't contain -a Python interpreter, the Python language has still served a useful purpose by -speeding up development. - -This is a very common use for Python. Past conference papers have also -described this approach for developing high-level numerical algorithms; see -David M. Beazley and Peter S. Lomdahl's paper "Feeding a Large-scale Physics -Application to Python" in the references for a good example. If an algorithm's -basic operations are things like "Take the inverse of this 4000x4000 matrix", -and are implemented in some lower-level language, then Python has almost no -additional performance cost; the extra time required for Python to evaluate an -expression like ``m.invert()`` is dwarfed by the cost of the actual computation. -It's particularly good for applications where seemingly endless tweaking is -required to get things right. GUI interfaces and Web sites are prime examples. - -The Python code is also shorter and faster to write (once you're familiar with -Python), so it's easier to throw it away if you decide your approach was wrong; -if you'd spent two weeks working on it instead of just two hours, you might -waste time trying to patch up what you've got out of a natural reluctance to -admit that those two weeks were wasted. Truthfully, those two weeks haven't -been wasted, since you've learnt something about the problem and the technology -you're using to solve it, but it's human nature to view this as a failure of -some sort. - - -Simplicity and Ease of Understanding ------------------------------------- - -Python is definitely *not* a toy language that's only usable for small tasks. -The language features are general and powerful enough to enable it to be used -for many different purposes. It's useful at the small end, for 10- or 20-line -scripts, but it also scales up to larger systems that contain thousands of lines -of code. - -However, this expressiveness doesn't come at the cost of an obscure or tricky -syntax. While Python has some dark corners that can lead to obscure code, there -are relatively few such corners, and proper design can isolate their use to only -a few classes or modules. It's certainly possible to write confusing code by -using too many features with too little concern for clarity, but most Python -code can look a lot like a slightly-formalized version of human-understandable -pseudocode. - -In *The New Hacker's Dictionary*, Eric S. Raymond gives the following definition -for "compact": - -.. epigraph:: - - Compact *adj.* Of a design, describes the valuable property that it can all be - apprehended at once in one's head. This generally means the thing created from - the design can be used with greater facility and fewer errors than an equivalent - tool that is not compact. 
Compactness does not imply triviality or lack of - power; for example, C is compact and FORTRAN is not, but C is more powerful than - FORTRAN. Designs become non-compact through accreting features and cruft that - don't merge cleanly into the overall design scheme (thus, some fans of Classic C - maintain that ANSI C is no longer compact). - - (From http://www.catb.org/~esr/jargon/html/C/compact.html) - -In this sense of the word, Python is quite compact, because the language has -just a few ideas, which are used in lots of places. Take namespaces, for -example. Import a module with ``import math``, and you create a new namespace -called ``math``. Classes are also namespaces that share many of the properties -of modules, and have a few of their own; for example, you can create instances -of a class. Instances? They're yet another namespace. Namespaces are currently -implemented as Python dictionaries, so they have the same methods as the -standard dictionary data type: .keys() returns all the keys, and so forth. - -This simplicity arises from Python's development history. The language syntax -derives from different sources; ABC, a relatively obscure teaching language, is -one primary influence, and Modula-3 is another. (For more information about ABC -and Modula-3, consult their respective Web sites at http://www.cwi.nl/~steven/abc/ -and http://www.m3.org.) Other features have come from C, Icon, -Algol-68, and even Perl. Python hasn't really innovated very much, but instead -has tried to keep the language small and easy to learn, building on ideas that -have been tried in other languages and found useful. - -Simplicity is a virtue that should not be underestimated. It lets you learn the -language more quickly, and then rapidly write code -- code that often works the -first time you run it. - - -Java Integration ----------------- - -If you're working with Java, Jython (http://www.jython.org/) is definitely worth -your attention. Jython is a re-implementation of Python in Java that compiles -Python code into Java bytecodes. The resulting environment has very tight, -almost seamless, integration with Java. It's trivial to access Java classes -from Python, and you can write Python classes that subclass Java classes. -Jython can be used for prototyping Java applications in much the same way -CPython is used, and it can also be used for test suites for Java code, or -embedded in a Java application to add scripting capabilities. - - -Arguments and Rebuttals -======================= - -Let's say that you've decided upon Python as the best choice for your -application. How can you convince your management, or your fellow developers, -to use Python? This section lists some common arguments against using Python, -and provides some possible rebuttals. - -**Python is freely available software that doesn't cost anything. How good can -it be?** - -Very good, indeed. These days Linux and Apache, two other pieces of open source -software, are becoming more respected as alternatives to commercial software, -but Python hasn't had all the publicity. - -Python has been around for several years, with many users and developers. -Accordingly, the interpreter has been used by many people, and has gotten most -of the bugs shaken out of it. While bugs are still discovered at intervals, -they're usually either quite obscure (they'd have to be, for no one to have run -into them before) or they involve interfaces to external libraries. The -internals of the language itself are quite stable. 
- -Having the source code should be viewed as making the software available for -peer review; people can examine the code, suggest (and implement) improvements, -and track down bugs. To find out more about the idea of open source code, along -with arguments and case studies supporting it, go to http://www.opensource.org. - -**Who's going to support it?** - -Python has a sizable community of developers, and the number is still growing. -The Internet community surrounding the language is an active one, and is worth -being considered another one of Python's advantages. Most questions posted to -the comp.lang.python newsgroup are quickly answered by someone. - -Should you need to dig into the source code, you'll find it's clear and -well-organized, so it's not very difficult to write extensions and track down -bugs yourself. If you'd prefer to pay for support, there are companies and -individuals who offer commercial support for Python. - -**Who uses Python for serious work?** - -Lots of people; one interesting thing about Python is the surprising diversity -of applications that it's been used for. People are using Python to: - -* Run Web sites - -* Write GUI interfaces - -* Control number-crunching code on supercomputers - -* Make a commercial application scriptable by embedding the Python interpreter - inside it - -* Process large XML data sets - -* Build test suites for C or Java code - -Whatever your application domain is, there's probably someone who's used Python -for something similar. Yet, despite being useable for such high-end -applications, Python's still simple enough to use for little jobs. - -See http://wiki.python.org/moin/OrganizationsUsingPython for a list of some of -the organizations that use Python. - -**What are the restrictions on Python's use?** - -They're practically nonexistent. Consult :ref:`history-and-license` for the full -language, but it boils down to three conditions: - -* You have to leave the copyright notice on the software; if you don't include - the source code in a product, you have to put the copyright notice in the - supporting documentation. - -* Don't claim that the institutions that have developed Python endorse your - product in any way. - -* If something goes wrong, you can't sue for damages. Practically all software - licenses contain this condition. - -Notice that you don't have to provide source code for anything that contains -Python or is built with it. Also, the Python interpreter and accompanying -documentation can be modified and redistributed in any way you like, and you -don't have to pay anyone any licensing fees at all. - -**Why should we use an obscure language like Python instead of well-known -language X?** - -I hope this HOWTO, and the documents listed in the final section, will help -convince you that Python isn't obscure, and has a healthily growing user base. -One word of advice: always present Python's positive advantages, instead of -concentrating on language X's failings. People want to know why a solution is -good, rather than why all the other solutions are bad. So instead of attacking -a competing solution on various grounds, simply show how Python's virtues can -help. - - -Useful Resources -================ - -http://www.pythonology.com/success - The Python Success Stories are a collection of stories from successful users of - Python, with the emphasis on business and corporate users. - -.. 
http://www.fsbassociates.com/books/pythonchpt1.htm - The first chapter of \emph{Internet Programming with Python} also - examines some of the reasons for using Python. The book is well worth - buying, but the publishers have made the first chapter available on - the Web. - -http://www.tcl.tk/doc/scripting.html - John Ousterhout's white paper on scripting is a good argument for the utility of - scripting languages, though naturally enough, he emphasizes Tcl, the language he - developed. Most of the arguments would apply to any scripting language. - -http://www.python.org/workshops/1997-10/proceedings/beazley.html - The authors, David M. Beazley and Peter S. Lomdahl, describe their use of - Python at Los Alamos National Laboratory. It's another good example of how - Python can help get real work done. This quotation from the paper has been - echoed by many people: - - .. epigraph:: - - Originally developed as a large monolithic application for massively parallel - processing systems, we have used Python to transform our application into a - flexible, highly modular, and extremely powerful system for performing - simulation, data analysis, and visualization. In addition, we describe how - Python has solved a number of important problems related to the development, - debugging, deployment, and maintenance of scientific software. - -http://pythonjournal.cognizor.com/pyj1/Everitt-Feit_interview98-V1.html - This interview with Andy Feit, discussing Infoseek's use of Python, can be used - to show that choosing Python didn't introduce any difficulties into a company's - development process, and provided some substantial benefits. - -.. http://www.python.org/psa/Commercial.html - Robin Friedrich wrote this document on how to support Python's use in - commercial projects. - -http://www.python.org/workshops/1997-10/proceedings/stein.ps - For the 6th Python conference, Greg Stein presented a paper that traced Python's - adoption and usage at a startup called eShop, and later at Microsoft. - -http://www.opensource.org - Management may be doubtful of the reliability and usefulness of software that - wasn't written commercially. This site presents arguments that show how open - source software can have considerable advantages over closed-source software. - -http://www.faqs.org/docs/Linux-mini/Advocacy.html - The Linux Advocacy mini-HOWTO was the inspiration for this document, and is also - well worth reading for general suggestions on winning acceptance for a new - technology, such as Linux or Python. In general, you won't make much progress - by simply attacking existing systems and complaining about their inadequacies; - this often ends up looking like unfocused whining. It's much better to point - out some of the many areas where Python is an improvement over other systems. - diff --git a/Doc/howto/index.rst b/Doc/howto/index.rst --- a/Doc/howto/index.rst +++ b/Doc/howto/index.rst @@ -13,7 +13,6 @@ .. toctree:: :maxdepth: 1 - advocacy.rst pyporting.rst cporting.rst curses.rst diff --git a/Doc/howto/sockets.rst b/Doc/howto/sockets.rst --- a/Doc/howto/sockets.rst +++ b/Doc/howto/sockets.rst @@ -88,9 +88,11 @@ serversocket.listen(5) A couple things to notice: we used ``socket.gethostname()`` so that the socket -would be visible to the outside world. If we had used ``s.bind(('', 80))`` or -``s.bind(('localhost', 80))`` or ``s.bind(('127.0.0.1', 80))`` we would still -have a "server" socket, but one that was only visible within the same machine. +would be visible to the outside world. 
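To make the visibility point concrete, here is a minimal sketch of a listening
socket; the port number is an arbitrary, unprivileged choice used purely for
illustration::

   import socket

   PORT = 8089   # arbitrary unprivileged port, for illustration only

   server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
   # ('', PORT) makes the socket reachable on any address this machine has;
   # (socket.gethostname(), PORT) binds the externally visible host name,
   # while ('localhost', PORT) would accept connections from this machine only.
   server.bind(('', PORT))
   server.listen(5)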
If we had used ``s.bind(('localhost', +80))`` or ``s.bind(('127.0.0.1', 80))`` we would still have a "server" socket, +but one that was only visible within the same machine. ``s.bind(('', 80))`` +specifies that the socket is reachable by any address the machine happens to +have. A second thing to note: low number ports are usually reserved for "well known" services (HTTP, SNMP etc). If you're playing around, use a nice high number (4 diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst --- a/Doc/howto/urllib2.rst +++ b/Doc/howto/urllib2.rst @@ -489,7 +489,8 @@ In the above example we only supplied our ``HTTPBasicAuthHandler`` to ``build_opener``. By default openers have the handlers for normal situations - -- ``ProxyHandler``, ``UnknownHandler``, ``HTTPHandler``, + -- ``ProxyHandler`` (if a proxy setting such as an :envvar:`http_proxy` + environment variable is set), ``UnknownHandler``, ``HTTPHandler``, ``HTTPDefaultErrorHandler``, ``HTTPRedirectHandler``, ``FTPHandler``, ``FileHandler``, ``HTTPErrorProcessor``. @@ -506,10 +507,11 @@ ======= **urllib2** will auto-detect your proxy settings and use those. This is through -the ``ProxyHandler`` which is part of the normal handler chain. Normally that's -a good thing, but there are occasions when it may not be helpful [#]_. One way -to do this is to setup our own ``ProxyHandler``, with no proxies defined. This -is done using similar steps to setting up a `Basic Authentication`_ handler : :: +the ``ProxyHandler``, which is part of the normal handler chain when a proxy +setting is detected. Normally that's a good thing, but there are occasions +when it may not be helpful [#]_. One way to do this is to setup our own +``ProxyHandler``, with no proxies defined. This is done using similar steps to +setting up a `Basic Authentication`_ handler : :: >>> proxy_support = urllib2.ProxyHandler({}) >>> opener = urllib2.build_opener(proxy_support) diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -1098,86 +1098,112 @@ | utf_8_sig | | all languages | +-----------------+--------------------------------+--------------------------------+ -A number of codecs are specific to Python, so their codec names have no meaning -outside Python. Some of them don't convert from Unicode strings to byte strings, -but instead use the property of the Python codecs machinery that any bijective -function with one argument can be considered as an encoding. +Python Specific Encodings +------------------------- -For the codecs listed below, the result in the "encoding" direction is always a -byte string. The result of the "decoding" direction is listed as operand type in -the table. +A number of predefined codecs are specific to Python, so their codec names have +no meaning outside Python. These are listed in the tables below based on the +expected input and output types (note that while text encodings are the most +common use case for codecs, the underlying codec infrastructure supports +arbitrary data transforms rather than just text encodings). For asymmetric +codecs, the stated purpose describes the encoding direction. -.. tabularcolumns:: |l|p{0.3\linewidth}|l|p{0.3\linewidth}| +The following codecs provide unicode-to-str encoding [#encoding-note]_ and +str-to-unicode decoding [#decoding-note]_, similar to the Unicode text +encodings. 
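A short interactive session makes the two groups of Python-specific codecs
listed below more concrete; this is only an illustrative sketch under
Python 2.7, and the sample strings are arbitrary::

   >>> u'm\xfcnchen'.encode('idna')        # unicode -> str
   'xn--mnchen-3ya'
   >>> u'\xfc'.encode('unicode_escape')    # unicode -> str
   '\\xfc'
   >>> 'hello'.encode('base64')            # str -> str, note the trailing newline
   'aGVsbG8=\n'
   >>> '68656c6c6f'.decode('hex')          # str -> str
   'hello'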
-+--------------------+---------------------------+----------------+---------------------------+ -| Codec | Aliases | Operand type | Purpose | -+====================+===========================+================+===========================+ -| base64_codec | base64, base-64 | byte string | Convert operand to MIME | -| | | | base64 | -+--------------------+---------------------------+----------------+---------------------------+ -| bz2_codec | bz2 | byte string | Compress the operand | -| | | | using bz2 | -+--------------------+---------------------------+----------------+---------------------------+ -| hex_codec | hex | byte string | Convert operand to | -| | | | hexadecimal | -| | | | representation, with two | -| | | | digits per byte | -+--------------------+---------------------------+----------------+---------------------------+ -| idna | | Unicode string | Implements :rfc:`3490`, | -| | | | see also | -| | | | :mod:`encodings.idna` | -+--------------------+---------------------------+----------------+---------------------------+ -| mbcs | dbcs | Unicode string | Windows only: Encode | -| | | | operand according to the | -| | | | ANSI codepage (CP_ACP) | -+--------------------+---------------------------+----------------+---------------------------+ -| palmos | | Unicode string | Encoding of PalmOS 3.5 | -+--------------------+---------------------------+----------------+---------------------------+ -| punycode | | Unicode string | Implements :rfc:`3492` | -+--------------------+---------------------------+----------------+---------------------------+ -| quopri_codec | quopri, quoted-printable, | byte string | Convert operand to MIME | -| | quotedprintable | | quoted printable | -+--------------------+---------------------------+----------------+---------------------------+ -| raw_unicode_escape | | Unicode string | Produce a string that is | -| | | | suitable as raw Unicode | -| | | | literal in Python source | -| | | | code | -+--------------------+---------------------------+----------------+---------------------------+ -| rot_13 | rot13 | Unicode string | Returns the Caesar-cypher | -| | | | encryption of the operand | -+--------------------+---------------------------+----------------+---------------------------+ -| string_escape | | byte string | Produce a string that is | -| | | | suitable as string | -| | | | literal in Python source | -| | | | code | -+--------------------+---------------------------+----------------+---------------------------+ -| undefined | | any | Raise an exception for | -| | | | all conversions. Can be | -| | | | used as the system | -| | | | encoding if no automatic | -| | | | :term:`coercion` between | -| | | | byte and Unicode strings | -| | | | is desired. 
| -+--------------------+---------------------------+----------------+---------------------------+ -| unicode_escape | | Unicode string | Produce a string that is | -| | | | suitable as Unicode | -| | | | literal in Python source | -| | | | code | -+--------------------+---------------------------+----------------+---------------------------+ -| unicode_internal | | Unicode string | Return the internal | -| | | | representation of the | -| | | | operand | -+--------------------+---------------------------+----------------+---------------------------+ -| uu_codec | uu | byte string | Convert the operand using | -| | | | uuencode | -+--------------------+---------------------------+----------------+---------------------------+ -| zlib_codec | zip, zlib | byte string | Compress the operand | -| | | | using gzip | -+--------------------+---------------------------+----------------+---------------------------+ +.. tabularcolumns:: |l|L|L| + ++--------------------+---------------------------+---------------------------+ +| Codec | Aliases | Purpose | ++====================+===========================+===========================+ +| idna | | Implements :rfc:`3490`, | +| | | see also | +| | | :mod:`encodings.idna` | ++--------------------+---------------------------+---------------------------+ +| mbcs | dbcs | Windows only: Encode | +| | | operand according to the | +| | | ANSI codepage (CP_ACP) | ++--------------------+---------------------------+---------------------------+ +| palmos | | Encoding of PalmOS 3.5 | ++--------------------+---------------------------+---------------------------+ +| punycode | | Implements :rfc:`3492` | ++--------------------+---------------------------+---------------------------+ +| raw_unicode_escape | | Produce a string that is | +| | | suitable as raw Unicode | +| | | literal in Python source | +| | | code | ++--------------------+---------------------------+---------------------------+ +| rot_13 | rot13 | Returns the Caesar-cypher | +| | | encryption of the operand | ++--------------------+---------------------------+---------------------------+ +| undefined | | Raise an exception for | +| | | all conversions. Can be | +| | | used as the system | +| | | encoding if no automatic | +| | | :term:`coercion` between | +| | | byte and Unicode strings | +| | | is desired. | ++--------------------+---------------------------+---------------------------+ +| unicode_escape | | Produce a string that is | +| | | suitable as Unicode | +| | | literal in Python source | +| | | code | ++--------------------+---------------------------+---------------------------+ +| unicode_internal | | Return the internal | +| | | representation of the | +| | | operand | ++--------------------+---------------------------+---------------------------+ .. versionadded:: 2.3 The ``idna`` and ``punycode`` encodings. +The following codecs provide str-to-str encoding and decoding +[#decoding-note]_. + +.. 
tabularcolumns:: |l|L|L|L| + ++--------------------+---------------------------+---------------------------+------------------------------+ +| Codec | Aliases | Purpose | Encoder/decoder | ++====================+===========================+===========================+==============================+ +| base64_codec | base64, base-64 | Convert operand to MIME | :meth:`base64.b64encode`, | +| | | base64 (the result always | :meth:`base64.b64decode` | +| | | includes a trailing | | +| | | ``'\n'``) | | ++--------------------+---------------------------+---------------------------+------------------------------+ +| bz2_codec | bz2 | Compress the operand | :meth:`bz2.compress`, | +| | | using bz2 | :meth:`bz2.decompress` | ++--------------------+---------------------------+---------------------------+------------------------------+ +| hex_codec | hex | Convert operand to | :meth:`base64.b16encode`, | +| | | hexadecimal | :meth:`base64.b16decode` | +| | | representation, with two | | +| | | digits per byte | | ++--------------------+---------------------------+---------------------------+------------------------------+ +| quopri_codec | quopri, quoted-printable, | Convert operand to MIME | :meth:`quopri.encodestring`, | +| | quotedprintable | quoted printable | :meth:`quopri.decodestring` | ++--------------------+---------------------------+---------------------------+------------------------------+ +| string_escape | | Produce a string that is | | +| | | suitable as string | | +| | | literal in Python source | | +| | | code | | ++--------------------+---------------------------+---------------------------+------------------------------+ +| uu_codec | uu | Convert the operand using | :meth:`uu.encode`, | +| | | uuencode | :meth:`uu.decode` | ++--------------------+---------------------------+---------------------------+------------------------------+ +| zlib_codec | zip, zlib | Compress the operand | :meth:`zlib.compress`, | +| | | using gzip | :meth:`zlib.decompress` | ++--------------------+---------------------------+---------------------------+------------------------------+ + +.. [#encoding-note] str objects are also accepted as input in place of unicode + objects. They are implicitly converted to unicode by decoding them using + the default encoding. If this conversion fails, it may lead to encoding + operations raising :exc:`UnicodeDecodeError`. + +.. [#decoding-note] unicode objects are also accepted as input in place of str + objects. They are implicitly converted to str by encoding them using the + default encoding. If this conversion fails, it may lead to decoding + operations raising :exc:`UnicodeEncodeError`. + :mod:`encodings.idna` --- Internationalized Domain Names in Applications ------------------------------------------------------------------------ diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -628,9 +628,7 @@ 'Return a new OrderedDict which maps field names to their values' return OrderedDict(zip(self._fields, self)) - __dict__ = property(_asdict) - - def _replace(_self, **kwds): + def _replace(_self, **kwds): 'Return a new Point object replacing specified fields with new values' result = _self._make(map(kwds.pop, ('x', 'y'), _self)) if kwds: diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -127,7 +127,7 @@ # Perform same compilation, excluding files in .svn directories. 
import re - compileall.compile_dir('Lib/', rx=re.compile('/[.]svn'), force=True) + compileall.compile_dir('Lib/', rx=re.compile(r'[/\\][.]svn'), force=True) .. seealso:: diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -1333,7 +1333,7 @@ like ``find_library("c")`` will fail and return ``None``. If wrapping a shared library with :mod:`ctypes`, it *may* be better to determine -the shared library name at development type, and hardcode that into the wrapper +the shared library name at development time, and hardcode that into the wrapper module instead of using :func:`find_library` to locate the library at runtime. diff --git a/Doc/library/io.rst b/Doc/library/io.rst --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -296,6 +296,9 @@ to control the number of lines read: no more lines will be read if the total size (in bytes/characters) of all lines so far exceeds *hint*. + Note that it's already possible to iterate on file objects using ``for + line in file: ...`` without calling ``file.readlines()``. + .. method:: seek(offset, whence=SEEK_SET) Change the stream position to the given byte *offset*. *offset* is diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -732,9 +732,9 @@ next(b, None) return izip(a, b) - def grouper(n, iterable, fillvalue=None): + def grouper(iterable, n, fillvalue=None): "Collect data into fixed-length chunks or blocks" - # grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx + # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx args = [iter(iterable)] * n return izip_longest(fillvalue=fillvalue, *args) diff --git a/Doc/library/numbers.rst b/Doc/library/numbers.rst --- a/Doc/library/numbers.rst +++ b/Doc/library/numbers.rst @@ -73,10 +73,10 @@ .. class:: Integral - Subtypes :class:`Rational` and adds a conversion to :class:`int`. - Provides defaults for :func:`float`, :attr:`~Rational.numerator`, and - :attr:`~Rational.denominator`, and bit-string operations: ``<<``, - ``>>``, ``&``, ``^``, ``|``, ``~``. + Subtypes :class:`Rational` and adds a conversion to :class:`int`. Provides + defaults for :func:`float`, :attr:`~Rational.numerator`, and + :attr:`~Rational.denominator`. Adds abstract methods for ``**`` and + bit-string operations: ``<<``, ``>>``, ``&``, ``^``, ``|``, ``~``. Notes for type implementors diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -490,13 +490,22 @@ expect a function argument. -.. function:: attrgetter(attr[, args...]) +.. function:: attrgetter(attr) + attrgetter(*attrs) - Return a callable object that fetches *attr* from its operand. If more than one - attribute is requested, returns a tuple of attributes. After, - ``f = attrgetter('name')``, the call ``f(b)`` returns ``b.name``. After, - ``f = attrgetter('name', 'date')``, the call ``f(b)`` returns ``(b.name, - b.date)``. Equivalent to:: + Return a callable object that fetches *attr* from its operand. + If more than one attribute is requested, returns a tuple of attributes. + The attribute names can also contain dots. For example: + + * After ``f = attrgetter('name')``, the call ``f(b)`` returns ``b.name``. + + * After ``f = attrgetter('name', 'date')``, the call ``f(b)`` returns + ``(b.name, b.date)``. + + * After ``f = attrgetter('name.first', 'name.last')``, the call ``f(b)`` + returns ``(r.name.first, r.name.last)``. 
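A few concrete uses of these helpers, shown only as an illustrative sketch
(the ``Point`` namedtuple is an arbitrary example)::

   >>> from operator import attrgetter, itemgetter, methodcaller
   >>> from collections import namedtuple
   >>> Point = namedtuple('Point', 'x y')
   >>> rows = [Point(3, 1), Point(1, 2)]
   >>> sorted(rows, key=attrgetter('x'))
   [Point(x=1, y=2), Point(x=3, y=1)]
   >>> map(itemgetter(1), [('a', 10), ('b', 20)])
   [10, 20]
   >>> methodcaller('upper')('spam')
   'SPAM'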
+ + Equivalent to:: def attrgetter(*items): if len(items) == 1: @@ -514,9 +523,6 @@ return obj - The attribute names can also contain dots; after ``f = attrgetter('date.month')``, - the call ``f(b)`` returns ``b.date.month``. - .. versionadded:: 2.4 .. versionchanged:: 2.5 @@ -526,11 +532,19 @@ Added support for dotted attributes. -.. function:: itemgetter(item[, args...]) +.. function:: itemgetter(item) + itemgetter(*items) Return a callable object that fetches *item* from its operand using the operand's :meth:`__getitem__` method. If multiple items are specified, - returns a tuple of lookup values. Equivalent to:: + returns a tuple of lookup values. For example: + + * After ``f = itemgetter(2)``, the call ``f(r)`` returns ``r[2]``. + + * After ``g = itemgetter(2, 5, 3)``, the call ``g(r)`` returns + ``(r[2], r[5], r[3])``. + + Equivalent to:: def itemgetter(*items): if len(items) == 1: @@ -573,9 +587,14 @@ Return a callable object that calls the method *name* on its operand. If additional arguments and/or keyword arguments are given, they will be given - to the method as well. After ``f = methodcaller('name')``, the call ``f(b)`` - returns ``b.name()``. After ``f = methodcaller('name', 'foo', bar=1)``, the - call ``f(b)`` returns ``b.name('foo', bar=1)``. Equivalent to:: + to the method as well. For example: + + * After ``f = methodcaller('name')``, the call ``f(b)`` returns ``b.name()``. + + * After ``f = methodcaller('name', 'foo', bar=1)``, the call ``f(b)`` + returns ``b.name('foo', bar=1)``. + + Equivalent to:: def methodcaller(name, *args, **kwargs): def caller(obj): diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst --- a/Doc/library/resource.rst +++ b/Doc/library/resource.rst @@ -42,6 +42,11 @@ this module for those platforms. +.. data:: RLIM_INFINITY + + Constant used to represent the the limit for an unlimited resource. + + .. function:: getrlimit(resource) Returns a tuple ``(soft, hard)`` with the current soft and hard limits of @@ -53,12 +58,20 @@ Sets new limits of consumption of *resource*. The *limits* argument must be a tuple ``(soft, hard)`` of two integers describing the new limits. A value of - ``-1`` can be used to specify the maximum possible upper limit. + :data:`~resource.RLIM_INFINITY` can be used to request a limit that is + unlimited. Raises :exc:`ValueError` if an invalid resource is specified, if the new soft - limit exceeds the hard limit, or if a process tries to raise its hard limit - (unless the process has an effective UID of super-user). Can also raise - :exc:`error` if the underlying system call fails. + limit exceeds the hard limit, or if a process tries to raise its hard limit. + Specifying a limit of :data:`~resource.RLIM_INFINITY` when the hard or + system limit for that resource is not unlimited will result in a + :exc:`ValueError`. A process with the effective UID of super-user can + request any valid limit value, including unlimited, but :exc:`ValueError` + will still be raised if the requested limit exceeds the system imposed + limit. + + ``setrlimit`` may also raise :exc:`error` if the underlying system call + fails. These symbols define resources whose consumption can be controlled using the :func:`setrlimit` and :func:`getrlimit` functions described below. 
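As a rough illustration of the limit behaviour described above (Unix only;
``RLIMIT_NOFILE`` is just one of the available resources, and the sketch
assumes an ordinary, non-privileged process)::

   import resource

   soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)

   # A non-privileged process may raise its soft limit up to, but not past,
   # the hard limit; RLIM_INFINITY is only accepted here if the hard limit
   # itself is already unlimited.
   resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))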
The values of diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -28,7 +28,7 @@ The Python interface is a straightforward transliteration of the Unix system call and library interface for sockets to Python's object-oriented style: the -:func:`socket` function returns a :dfn:`socket object` whose methods implement +:func:`.socket` function returns a :dfn:`socket object` whose methods implement the various socket system calls. Parameter types are somewhat higher-level than in the C interface: as with :meth:`read` and :meth:`write` operations on Python files, buffer allocation on receive operations is automatic, and buffer length @@ -146,7 +146,7 @@ AF_INET6 These constants represent the address (and protocol) families, used for the - first argument to :func:`socket`. If the :const:`AF_UNIX` constant is not + first argument to :func:`.socket`. If the :const:`AF_UNIX` constant is not defined then this protocol is unsupported. @@ -252,7 +252,7 @@ ``(family, socktype, proto, canonname, sockaddr)`` In these tuples, *family*, *socktype*, *proto* are all integers and are - meant to be passed to the :func:`socket` function. *canonname* will be + meant to be passed to the :func:`.socket` function. *canonname* will be a string representing the canonical name of the *host* if :const:`AI_CANONNAME` is part of the *flags* argument; else *canonname* will be empty. *sockaddr* is a tuple describing a socket address, whose @@ -343,7 +343,7 @@ .. function:: getprotobyname(protocolname) Translate an Internet protocol name (for example, ``'icmp'``) to a constant - suitable for passing as the (optional) third argument to the :func:`socket` + suitable for passing as the (optional) third argument to the :func:`.socket` function. This is usually only needed for sockets opened in "raw" mode (:const:`SOCK_RAW`); for the normal socket modes, the correct protocol is chosen automatically if the protocol is omitted or zero. @@ -377,7 +377,7 @@ Build a pair of connected socket objects using the given address family, socket type, and protocol number. Address family, socket type, and protocol number are - as for the :func:`socket` function above. The default family is :const:`AF_UNIX` + as for the :func:`.socket` function above. The default family is :const:`AF_UNIX` if defined on the platform; otherwise, the default is :const:`AF_INET`. Availability: Unix. @@ -388,7 +388,7 @@ Duplicate the file descriptor *fd* (an integer as returned by a file object's :meth:`fileno` method) and build a socket object from the result. Address - family, socket type and protocol number are as for the :func:`socket` function + family, socket type and protocol number are as for the :func:`.socket` function above. The file descriptor should refer to a socket, but this is not checked --- subsequent operations on the object may fail if the file descriptor is invalid. This function is rarely needed, but can be used to get or set socket options on @@ -861,10 +861,10 @@ Here are four minimal example programs using the TCP/IP protocol: a server that echoes all data that it receives back (servicing only one client), and a client -using it. Note that a server must perform the sequence :func:`socket`, +using it. 
Note that a server must perform the sequence :func:`.socket`, :meth:`~socket.bind`, :meth:`~socket.listen`, :meth:`~socket.accept` (possibly repeating the :meth:`~socket.accept` to service more than one client), while a -client only needs the sequence :func:`socket`, :meth:`~socket.connect`. Also +client only needs the sequence :func:`.socket`, :meth:`~socket.connect`. Also note that the server does not :meth:`~socket.sendall`/:meth:`~socket.recv` on the socket it is listening on but on the new socket returned by :meth:`~socket.accept`. diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -328,7 +328,7 @@ If there is no certificate for the peer on the other end of the connection, returns ``None``. - If the parameter ``binary_form`` is :const:`False`, and a certificate was + If the ``binary_form`` parameter is :const:`False`, and a certificate was received from the peer, this method returns a :class:`dict` instance. If the certificate was not validated, the dict is empty. If the certificate was validated, it returns a dict with the keys ``subject`` (the principal for @@ -354,10 +354,16 @@ If the ``binary_form`` parameter is :const:`True`, and a certificate was provided, this method returns the DER-encoded form of the entire certificate as a sequence of bytes, or :const:`None` if the peer did not provide a - certificate. This return value is independent of validation; if validation - was required (:const:`CERT_OPTIONAL` or :const:`CERT_REQUIRED`), it will have - been validated, but if :const:`CERT_NONE` was used to establish the - connection, the certificate, if present, will not have been validated. + certificate. Whether the peer provides a certificate depends on the SSL + socket's role: + + * for a client SSL socket, the server will always provide a certificate, + regardless of whether validation was required; + + * for a server SSL socket, the client will only provide a certificate + when requested by the server; therefore :meth:`getpeercert` will return + :const:`None` if you used :const:`CERT_NONE` (rather than + :const:`CERT_OPTIONAL` or :const:`CERT_REQUIRED`). .. method:: SSLSocket.cipher() diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -26,7 +26,7 @@ Some operations are supported by several object types; in particular, practically all objects can be compared, tested for truth value, and converted -to a string (with the :func:`repr` function or the slightly different +to a string (with the :ref:`repr() ` function or the slightly different :func:`str` function). The latter function is implicitly used when an object is written by the :func:`print` function. @@ -931,10 +931,22 @@ .. method:: str.expandtabs([tabsize]) Return a copy of the string where all tab characters are replaced by one or - more spaces, depending on the current column and the given tab size. The - column number is reset to zero after each newline occurring in the string. - If *tabsize* is not given, a tab size of ``8`` characters is assumed. This - doesn't understand other non-printing characters or escape sequences. + more spaces, depending on the current column and the given tab size. Tab + positions occur every *tabsize* characters (default is 8, giving tab + positions at columns 0, 8, 16 and so on). To expand the string, the current + column is set to zero and the string is examined character by character. 
If + the character is a tab (``\t``), one or more space characters are inserted + in the result until the current column is equal to the next tab position. + (The tab character itself is not copied.) If the character is a newline + (``\n``) or return (``\r``), it is copied and the current column is reset to + zero. Any other character is copied unchanged and the current column is + incremented by one regardless of how the character is represented when + printed. + + >>> '01\t012\t0123\t01234'.expandtabs() + '01 012 0123 01234' + >>> '01\t012\t0123\t01234'.expandtabs(4) + '01 012 0123 01234' .. method:: str.find(sub[, start[, end]]) @@ -1452,7 +1464,7 @@ | | character string). | | +------------+-----------------------------------------------------+-------+ | ``'r'`` | String (converts any Python object using | \(5) | -| | :func:`repr`). | | +| | :ref:`repr() `). | | +------------+-----------------------------------------------------+-------+ | ``'s'`` | String (converts any Python object using | \(6) | | | :func:`str`). | | @@ -1837,8 +1849,8 @@ based on their members. For example, ``set('abc') == frozenset('abc')`` returns ``True`` and so does ``set('abc') in set([frozenset('abc')])``. - The subset and equality comparisons do not generalize to a complete ordering - function. For example, any two disjoint sets are not equal and are not + The subset and equality comparisons do not generalize to a total ordering + function. For example, any two non-empty disjoint sets are not equal and are not subsets of each other, so *all* of the following return ``False``: ``ab``. Accordingly, sets do not implement the :meth:`__cmp__` method. diff --git a/Doc/library/string.rst b/Doc/library/string.rst --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -453,12 +453,13 @@ +=========+==========================================================+ | ``'e'`` | Exponent notation. Prints the number in scientific | | | notation using the letter 'e' to indicate the exponent. | + | | The default precision is ``6``. | +---------+----------------------------------------------------------+ | ``'E'`` | Exponent notation. Same as ``'e'`` except it uses an | | | upper case 'E' as the separator character. | +---------+----------------------------------------------------------+ | ``'f'`` | Fixed point. Displays the number as a fixed-point | - | | number. | + | | number. The default precision is ``6``. | +---------+----------------------------------------------------------+ | ``'F'`` | Fixed point. Same as ``'f'``. | +---------+----------------------------------------------------------+ @@ -484,7 +485,7 @@ | | the precision. | | | | | | A precision of ``0`` is treated as equivalent to a | - | | precision of ``1``. | + | | precision of ``1``. The default precision is ``6``. | +---------+----------------------------------------------------------+ | ``'G'`` | General format. Same as ``'g'`` except switches to | | | ``'E'`` if the number gets too large. The | diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -1075,6 +1075,8 @@ sorted(actual))`` but it works with sequences of unhashable objects as well. + In Python 3, this method is named ``assertCountEqual``. + .. versionadded:: 2.7 diff --git a/Doc/library/urllib.rst b/Doc/library/urllib.rst --- a/Doc/library/urllib.rst +++ b/Doc/library/urllib.rst @@ -280,6 +280,13 @@ find it, looks for proxy information from Mac OSX System Configuration for Mac OS X and Windows Systems Registry for Windows. 
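For example, proxy handling can also be controlled explicitly by passing a
*proxies* dictionary to ``urllib.urlopen``; the proxy URL below is purely
hypothetical::

   import urllib

   # Force a specific proxy instead of relying on autodetection
   # (proxy.example.com is a placeholder, not a real host):
   proxies = {'http': 'http://proxy.example.com:3128'}
   conn = urllib.urlopen('http://www.python.org/', proxies=proxies)

   # Pass an empty dictionary to disable proxy handling entirely:
   conn = urllib.urlopen('http://www.python.org/', proxies={})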
+.. note:: + urllib also exposes certain utility functions like splittype, splithost and + others parsing url into various components. But it is recommended to use + :mod:`urlparse` for parsing urls than using these functions directly. + Python 3 does not expose these helper functions from :mod:`urllib.parse` + module. + URL Opener objects ------------------ diff --git a/Doc/library/urllib2.rst b/Doc/library/urllib2.rst --- a/Doc/library/urllib2.rst +++ b/Doc/library/urllib2.rst @@ -60,8 +60,10 @@ default installed global :class:`OpenerDirector` uses :class:`UnknownHandler` to ensure this never happens). - In addition, default installed :class:`ProxyHandler` makes sure the requests - are handled through the proxy when they are set. + In addition, if proxy settings are detected (for example, when a ``*_proxy`` + environment variable like :envvar:`http_proxy` is set), + :class:`ProxyHandler` is default installed and makes sure the requests are + handled through the proxy. .. versionchanged:: 2.6 *timeout* was added. @@ -83,7 +85,8 @@ subclasses of :class:`BaseHandler` (in which case it must be possible to call the constructor without any parameters). Instances of the following classes will be in front of the *handler*\s, unless the *handler*\s contain them, - instances of them or subclasses of them: :class:`ProxyHandler`, + instances of them or subclasses of them: :class:`ProxyHandler` (if proxy + settings are detected), :class:`UnknownHandler`, :class:`HTTPHandler`, :class:`HTTPDefaultErrorHandler`, :class:`HTTPRedirectHandler`, :class:`FTPHandler`, :class:`FileHandler`, :class:`HTTPErrorProcessor`. @@ -202,9 +205,9 @@ Cause requests to go through a proxy. If *proxies* is given, it must be a dictionary mapping protocol names to URLs of proxies. The default is to read the list of proxies from the environment variables - :envvar:`_proxy`. If no proxy environment variables are set, in a - Windows environment, proxy settings are obtained from the registry's - Internet Settings section and in a Mac OS X environment, proxy information + :envvar:`_proxy`. If no proxy environment variables are set, then + in a Windows environment proxy settings are obtained from the registry's + Internet Settings section, and in a Mac OS X environment proxy information is retrieved from the OS X System Configuration Framework. To disable autodetected proxy pass an empty dictionary. diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -623,9 +623,8 @@ single: im_self (method attribute) When a user-defined method object is created by retrieving a class method object - from a class or instance, its :attr:`im_self` attribute is the class itself (the - same as the :attr:`im_class` attribute), and its :attr:`im_func` attribute is - the function object underlying the class method. + from a class or instance, its :attr:`im_self` attribute is the class itself, and + its :attr:`im_func` attribute is the function object underlying the class method. When an unbound user-defined method object is called, the underlying function (:attr:`im_func`) is called, with the restriction that the first argument must @@ -797,8 +796,8 @@ associated class is either :class:`C` or one of its base classes, it is transformed into an unbound user-defined method object whose :attr:`im_class` attribute is :class:`C`. 
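The class method binding described above can be verified interactively; a
minimal sketch, assuming CPython 2.x::

   >>> class C(object):
   ...     @classmethod
   ...     def cm(cls):
   ...         return cls
   ...
   >>> C.cm.im_self is C        # bound to the class itself
   True
   >>> C().cm.im_self is C      # the same when retrieved from an instance
   True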
When it would yield a class method object, it is - transformed into a bound user-defined method object whose :attr:`im_class` - and :attr:`im_self` attributes are both :class:`C`. When it would yield a + transformed into a bound user-defined method object whose + :attr:`im_self` attribute is :class:`C`. When it would yield a static method object, it is transformed into the object wrapped by the static method object. See section :ref:`descriptors` for another way in which attributes retrieved from a class may differ from those actually contained in diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -96,14 +96,13 @@ definition begins with two or more underscore characters and does not end in two or more underscores, it is considered a :dfn:`private name` of that class. Private names are transformed to a longer form before code is generated for -them. The transformation inserts the class name in front of the name, with -leading underscores removed, and a single underscore inserted in front of the -class name. For example, the identifier ``__spam`` occurring in a class named -``Ham`` will be transformed to ``_Ham__spam``. This transformation is -independent of the syntactical context in which the identifier is used. If the -transformed name is extremely long (longer than 255 characters), implementation -defined truncation may happen. If the class name consists only of underscores, -no transformation is done. +them. The transformation inserts the class name, with leading underscores +removed and a single underscore inserted, in front of the name. For example, +the identifier ``__spam`` occurring in a class named ``Ham`` will be transformed +to ``_Ham__spam``. This transformation is independent of the syntactical +context in which the identifier is used. If the transformed name is extremely +long (longer than 255 characters), implementation defined truncation may happen. +If the class name consists only of underscores, no transformation is done. diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -511,6 +511,9 @@ :meth:`close` method will be called, allowing any pending :keyword:`finally` clauses to execute. +For full details of :keyword:`yield` semantics, refer to the :ref:`yieldexpr` +section. + .. note:: In Python 2.2, the :keyword:`yield` statement was only allowed when the diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst --- a/Doc/tutorial/inputoutput.rst +++ b/Doc/tutorial/inputoutput.rst @@ -215,10 +215,6 @@ >>> print 'The value of PI is approximately %5.3f.' % math.pi The value of PI is approximately 3.142. -Since :meth:`str.format` is quite new, a lot of Python code still uses the ``%`` -operator. However, because this old style of formatting will eventually be -removed from the language, :meth:`str.format` should generally be used. - More information can be found in the :ref:`string-formatting` section. @@ -295,18 +291,8 @@ >>> f.readline() '' -``f.readlines()`` returns a list containing all the lines of data in the file. -If given an optional parameter *sizehint*, it reads that many bytes from the -file and enough more to complete a line, and returns the lines from that. This -is often used to allow efficient reading of a large file by lines, but without -having to load the entire file in memory. Only complete lines will be returned. 
-:: - - >>> f.readlines() - ['This is the first line of the file.\n', 'Second line of the file\n'] - -An alternative approach to reading lines is to loop over the file object. This is -memory efficient, fast, and leads to simpler code:: +For reading lines from a file, you can loop over the file object. This is memory +efficient, fast, and leads to simple code:: >>> for line in f: print line, @@ -314,9 +300,8 @@ This is the first line of the file. Second line of the file -The alternative approach is simpler but does not provide as fine-grained -control. Since the two approaches manage line buffering differently, they -should not be mixed. +If you want to read all the lines of a file in a list you can also use +``list(f)`` or ``f.readlines()``. ``f.write(string)`` writes the contents of *string* to the file, returning ``None``. :: diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst --- a/Doc/tutorial/modules.rst +++ b/Doc/tutorial/modules.rst @@ -71,7 +71,8 @@ A module can contain executable statements as well as function definitions. These statements are intended to initialize the module. They are executed only -the *first* time the module is imported somewhere. [#]_ +the *first* time the module name is encountered in an import statement. [#]_ +(They are also run if the file is executed as a script.) Each module has its own private symbol table, which is used as the global symbol table by all functions defined in the module. Thus, the author of a module can @@ -550,6 +551,6 @@ .. rubric:: Footnotes .. [#] In fact function definitions are also 'statements' that are 'executed'; the - execution of a module-level function enters the function name in the module's - global symbol table. + execution of a module-level function definition enters the function name in + the module's global symbol table. diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst --- a/Doc/using/mac.rst +++ b/Doc/using/mac.rst @@ -25,14 +25,14 @@ Getting and Installing MacPython ================================ -Mac OS X 10.5 comes with Python 2.5.1 pre-installed by Apple. If you wish, you +Mac OS X 10.8 comes with Python 2.7 pre-installed by Apple. If you wish, you are invited to install the most recent version of Python from the Python website (http://www.python.org). A current "universal binary" build of Python, which runs natively on the Mac's new Intel and legacy PPC CPU's, is available there. What you get after installing is a number of things: -* A :file:`MacPython 2.5` folder in your :file:`Applications` folder. In here +* A :file:`MacPython 2.7` folder in your :file:`Applications` folder. In here you find IDLE, the development environment that is a standard part of official Python distributions; PythonLauncher, which handles double-clicking Python scripts from the Finder; and the "Build Applet" tool, which allows you to @@ -100,7 +100,7 @@ anything that has a GUI) need to be run in a special way. Use :program:`pythonw` instead of :program:`python` to start such scripts. -With Python 2.5, you can use either :program:`python` or :program:`pythonw`. +With Python 2.7, you can use either :program:`python` or :program:`pythonw`. Configuration @@ -133,13 +133,11 @@ There are several methods to install additional Python packages: -* http://pythonmac.org/packages/ contains selected compiled packages for Python - 2.5, 2.4, and 2.3. - * Packages can be installed via the standard Python distutils mode (``python setup.py install``). -* Many packages can also be installed via the :program:`setuptools` extension. 
+* Many packages can also be installed via the :program:`setuptools` extension + or :program:`pip` wrapper, see http://www.pip-installer.org/. GUI Programming on the Mac @@ -167,7 +165,7 @@ Distributing Python Applications on the Mac =========================================== -The "Build Applet" tool that is placed in the MacPython 2.5 folder is fine for +The "Build Applet" tool that is placed in the MacPython 2.7 folder is fine for packaging small Python scripts on your own machine to run as a standard Mac application. This tool, however, is not robust enough to distribute Python applications to other users. diff --git a/Include/object.h b/Include/object.h --- a/Include/object.h +++ b/Include/object.h @@ -984,16 +984,22 @@ #define PyTrash_UNWIND_LEVEL 50 +/* Note the workaround for when the thread state is NULL (issue #17703) */ #define Py_TRASHCAN_SAFE_BEGIN(op) \ do { \ PyThreadState *_tstate = PyThreadState_GET(); \ - if (_tstate->trash_delete_nesting < PyTrash_UNWIND_LEVEL) { \ - ++_tstate->trash_delete_nesting; + if (!_tstate || \ + _tstate->trash_delete_nesting < PyTrash_UNWIND_LEVEL) { \ + if (_tstate) \ + ++_tstate->trash_delete_nesting; /* The body of the deallocator is here. */ #define Py_TRASHCAN_SAFE_END(op) \ - --_tstate->trash_delete_nesting; \ - if (_tstate->trash_delete_later && _tstate->trash_delete_nesting <= 0) \ - _PyTrash_thread_destroy_chain(); \ + if (_tstate) { \ + --_tstate->trash_delete_nesting; \ + if (_tstate->trash_delete_later \ + && _tstate->trash_delete_nesting <= 0) \ + _PyTrash_thread_destroy_chain(); \ + } \ } \ else \ _PyTrash_thread_deposit_object((PyObject*)op); \ diff --git a/Include/patchlevel.h b/Include/patchlevel.h --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -22,12 +22,12 @@ /*--start constants--*/ #define PY_MAJOR_VERSION 2 #define PY_MINOR_VERSION 7 -#define PY_MICRO_VERSION 4 +#define PY_MICRO_VERSION 5 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL #define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "2.7.4+" +#define PY_VERSION "2.7.5+" /*--end constants--*/ /* Subversion Revision number of this file (not of the repository). 
Empty diff --git a/Lib/_weakrefset.py b/Lib/_weakrefset.py --- a/Lib/_weakrefset.py +++ b/Lib/_weakrefset.py @@ -171,6 +171,12 @@ return NotImplemented return self.data == set(ref(item) for item in other) + def __ne__(self, other): + opposite = self.__eq__(other) + if opposite is NotImplemented: + return NotImplemented + return not opposite + def symmetric_difference(self, other): newset = self.copy() newset.symmetric_difference_update(other) diff --git a/Lib/collections.py b/Lib/collections.py --- a/Lib/collections.py +++ b/Lib/collections.py @@ -259,8 +259,6 @@ 'Return a new OrderedDict which maps field names to their values' return OrderedDict(zip(self._fields, self)) - __dict__ = property(_asdict) - def _replace(_self, **kwds): 'Return a new {typename} object replacing specified fields with new values' result = _self._make(map(kwds.pop, {field_names!r}, _self)) diff --git a/Lib/ctypes/test/__init__.py b/Lib/ctypes/test/__init__.py --- a/Lib/ctypes/test/__init__.py +++ b/Lib/ctypes/test/__init__.py @@ -62,7 +62,7 @@ continue try: mod = __import__(modname, globals(), locals(), ['*']) - except ResourceDenied, detail: + except (ResourceDenied, unittest.SkipTest) as detail: skipped.append(modname) if verbosity > 1: print >> sys.stderr, "Skipped %s: %s" % (modname, detail) diff --git a/Lib/ctypes/test/test_wintypes.py b/Lib/ctypes/test/test_wintypes.py new file mode 100644 --- /dev/null +++ b/Lib/ctypes/test/test_wintypes.py @@ -0,0 +1,43 @@ +import sys +import unittest + +if not sys.platform.startswith('win'): + raise unittest.SkipTest('Windows-only test') + +from ctypes import * +from ctypes import wintypes + +class WinTypesTest(unittest.TestCase): + def test_variant_bool(self): + # reads 16-bits from memory, anything non-zero is True + for true_value in (1, 32767, 32768, 65535, 65537): + true = POINTER(c_int16)(c_int16(true_value)) + value = cast(true, POINTER(wintypes.VARIANT_BOOL)) + self.assertEqual(repr(value.contents), 'VARIANT_BOOL(True)') + + vb = wintypes.VARIANT_BOOL() + self.assertIs(vb.value, False) + vb.value = True + self.assertIs(vb.value, True) + vb.value = true_value + self.assertIs(vb.value, True) + + for false_value in (0, 65536, 262144, 2**33): + false = POINTER(c_int16)(c_int16(false_value)) + value = cast(false, POINTER(wintypes.VARIANT_BOOL)) + self.assertEqual(repr(value.contents), 'VARIANT_BOOL(False)') + + # allow any bool conversion on assignment to value + for set_value in (65536, 262144, 2**33): + vb = wintypes.VARIANT_BOOL() + vb.value = set_value + self.assertIs(vb.value, True) + + vb = wintypes.VARIANT_BOOL() + vb.value = [2, 3] + self.assertIs(vb.value, True) + vb.value = [] + self.assertIs(vb.value, False) + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py --- a/Lib/ctypes/util.py +++ b/Lib/ctypes/util.py @@ -93,7 +93,7 @@ fdout, ccout = tempfile.mkstemp() os.close(fdout) cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; elif type cc >/dev/null 2>&1; then CC=cc;else exit 10; fi;' \ - '$CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name + 'LANG=C LC_ALL=C $CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name try: f = os.popen(cmd) try: diff --git a/Lib/distutils/__init__.py b/Lib/distutils/__init__.py --- a/Lib/distutils/__init__.py +++ b/Lib/distutils/__init__.py @@ -15,5 +15,5 @@ # Updated automatically by the Python release process. 
# #--start constants-- -__version__ = "2.7.4" +__version__ = "2.7.5" #--end constants-- diff --git a/Lib/filecmp.py b/Lib/filecmp.py --- a/Lib/filecmp.py +++ b/Lib/filecmp.py @@ -268,7 +268,7 @@ def _cmp(a, b, sh, abs=abs, cmp=cmp): try: return not abs(cmp(a, b, sh)) - except os.error: + except (os.error, IOError): return 2 diff --git a/Lib/gzip.py b/Lib/gzip.py --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -21,6 +21,9 @@ # or unsigned. output.write(struct.pack(" self.extrasize: - if not self._read(readsize): - if size > self.extrasize: - size = self.extrasize - break - readsize = min(self.max_read_chunk, readsize * 2) + try: + while size > self.extrasize: + self._read(readsize) + readsize = min(self.max_read_chunk, readsize * 2) + except EOFError: + if size > self.extrasize: + size = self.extrasize offset = self.offset - self.extrastart chunk = self.extrabuf[offset: offset + size] @@ -275,7 +277,7 @@ def _read(self, size=1024): if self.fileobj is None: - return False + raise EOFError, "Reached EOF" if self._new_member: # If the _new_member flag is set, we have to @@ -286,7 +288,7 @@ pos = self.fileobj.tell() # Save current position self.fileobj.seek(0, 2) # Seek to end of file if pos == self.fileobj.tell(): - return False + raise EOFError, "Reached EOF" else: self.fileobj.seek( pos ) # Return to original position @@ -303,10 +305,9 @@ if buf == "": uncompress = self.decompress.flush() - self.fileobj.seek(-len(self.decompress.unused_data), 1) self._read_eof() self._add_read_data( uncompress ) - return False + raise EOFError, 'Reached EOF' uncompress = self.decompress.decompress(buf) self._add_read_data( uncompress ) @@ -316,14 +317,13 @@ # so seek back to the start of the unused data, finish up # this member, and read a new gzip header. # (The number of bytes to seek back is the length of the unused - # data) - self.fileobj.seek(-len(self.decompress.unused_data), 1) + # data, minus 8 because _read_eof() will rewind a further 8 bytes) + self.fileobj.seek( -len(self.decompress.unused_data)+8, 1) # Check the CRC and file size, and set the flag so we read # a new member on the next call self._read_eof() self._new_member = True - return True def _add_read_data(self, data): self.crc = zlib.crc32(data, self.crc) & 0xffffffffL @@ -334,11 +334,14 @@ self.size = self.size + len(data) def _read_eof(self): - # We've read to the end of the file. + # We've read to the end of the file, so we have to rewind in order + # to reread the 8 bytes containing the CRC and the file size. # We check the that the computed CRC and size of the # uncompressed data matches the stored values. Note that the size # stored is the true file size mod 2**32. 
- crc32, isize = struct.unpack(">", "") + text.event_add("<>", "") + def highlight_fix(focus): + sel_range = text.tag_ranges("sel") + if sel_range: + if focus == 'out': + HILITE_CONFIG = idleConf.GetHighlight( + idleConf.CurrentTheme(), 'hilite') + text.tag_config("sel_fix", HILITE_CONFIG) + text.tag_raise("sel_fix") + text.tag_add("sel_fix", *sel_range) + elif focus == 'in': + text.tag_remove("sel_fix", "1.0", "end") + + text.bind("<>", + lambda ev: highlight_fix("out")) + text.bind("<>", + lambda ev: highlight_fix("in")) + + def _filename_to_unicode(self, filename): """convert filename to unicode in order to display it in Tk""" if isinstance(filename, unicode) or not filename: @@ -437,7 +467,6 @@ ] if macosxSupport.runningAsOSXApp(): - del menu_specs[-3] menu_specs[-2] = ("windows", "_Window") diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -844,7 +844,6 @@ ] if macosxSupport.runningAsOSXApp(): - del menu_specs[-3] menu_specs[-2] = ("windows", "_Window") diff --git a/Lib/idlelib/help.txt b/Lib/idlelib/help.txt --- a/Lib/idlelib/help.txt +++ b/Lib/idlelib/help.txt @@ -233,8 +233,7 @@ Python Shell window: Control-c interrupts executing command. - Control-d sends end-of-file; closes window if typed at >>> prompt - (this is Control-z on Windows). + Control-d sends end-of-file; closes window if typed at >>> prompt. Command history: diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py --- a/Lib/idlelib/idlever.py +++ b/Lib/idlelib/idlever.py @@ -1,1 +1,1 @@ -IDLE_VERSION = "2.7.4" +IDLE_VERSION = "2.7.5" diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py --- a/Lib/idlelib/run.py +++ b/Lib/idlelib/run.py @@ -264,6 +264,11 @@ IOBinding.encoding) sys.stderr = PyShell.PseudoOutputFile(self.console, "stderr", IOBinding.encoding) + + # Keep a reference to stdin so that it won't try to exit IDLE if + # sys.stdin gets changed from within IDLE's shell. See issue17838. + self._keep_stdin = sys.stdin + self.interp = self.get_remote_proxy("interp") rpc.RPCHandler.getresponse(self, myseq=None, wait=0.05) diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -1,4 +1,4 @@ -# Copyright 2001-2012 by Vinay Sajip. All Rights Reserved. +# Copyright 2001-2013 by Vinay Sajip. All Rights Reserved. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, @@ -18,7 +18,7 @@ Additional handlers for the logging package for Python. The core package is based on PEP 282 and comments thereto in comp.lang.python. -Copyright (C) 2001-2012 Vinay Sajip. All Rights Reserved. +Copyright (C) 2001-2013 Vinay Sajip. All Rights Reserved. To use, simply 'import logging.handlers' and log away! """ @@ -737,13 +737,17 @@ } def __init__(self, address=('localhost', SYSLOG_UDP_PORT), - facility=LOG_USER, socktype=socket.SOCK_DGRAM): + facility=LOG_USER, socktype=None): """ Initialize a handler. If address is specified as a string, a UNIX socket is used. To log to a local syslogd, "SysLogHandler(address="/dev/log")" can be used. - If facility is not specified, LOG_USER is used. + If facility is not specified, LOG_USER is used. If socktype is + specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific + socket type will be used. For Unix sockets, you can also specify a + socktype of None, in which case socket.SOCK_DGRAM will be used, falling + back to socket.SOCK_STREAM. 
""" logging.Handler.__init__(self) @@ -756,18 +760,37 @@ self._connect_unixsocket(address) else: self.unixsocket = 0 + if socktype is None: + socktype = socket.SOCK_DGRAM self.socket = socket.socket(socket.AF_INET, socktype) if socktype == socket.SOCK_STREAM: self.socket.connect(address) + self.socktype = socktype self.formatter = None def _connect_unixsocket(self, address): - self.socket = socket.socket(socket.AF_UNIX, self.socktype) + use_socktype = self.socktype + if use_socktype is None: + use_socktype = socket.SOCK_DGRAM + self.socket = socket.socket(socket.AF_UNIX, use_socktype) try: self.socket.connect(address) + # it worked, so set self.socktype to the used type + self.socktype = use_socktype except socket.error: self.socket.close() - raise + if self.socktype is not None: + # user didn't specify falling back, so fail + raise + use_socktype = socket.SOCK_STREAM + self.socket = socket.socket(socket.AF_UNIX, use_socktype) + try: + self.socket.connect(address) + # it worked, so set self.socktype to the used type + self.socktype = use_socktype + except socket.error: + self.socket.close() + raise # curious: when talking to the unix-domain '/dev/log' socket, a # zero-terminator seems to be required. this string is placed @@ -833,6 +856,7 @@ try: self.socket.send(msg) except socket.error: + self.socket.close() # See issue 17981 self._connect_unixsocket(self.address) self.socket.send(msg) elif self.socktype == socket.SOCK_DGRAM: diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -386,12 +386,14 @@ '.taz': '.tar.gz', '.tz': '.tar.gz', '.tbz2': '.tar.bz2', + '.txz': '.tar.xz', } encodings_map = { '.gz': 'gzip', '.Z': 'compress', '.bz2': 'bzip2', + '.xz': 'xz', } # Before adding new types, make sure they are either registered with IANA, diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -565,6 +565,8 @@ self._cond.release() del self._cache[self._job] +AsyncResult = ApplyResult # create alias -- see #17805 + # # Class whose instances are returned by `Pool.map_async()` # diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py --- a/Lib/multiprocessing/synchronize.py +++ b/Lib/multiprocessing/synchronize.py @@ -226,7 +226,7 @@ num_waiters = (self._sleeping_count._semlock._get_value() - self._woken_count._semlock._get_value()) except Exception: - num_waiters = 'unkown' + num_waiters = 'unknown' return '' % (self._lock, num_waiters) def wait(self, timeout=None): diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py --- a/Lib/multiprocessing/util.py +++ b/Lib/multiprocessing/util.py @@ -329,10 +329,13 @@ class ForkAwareThreadLock(object): def __init__(self): + self._reset() + register_after_fork(self, ForkAwareThreadLock._reset) + + def _reset(self): self._lock = threading.Lock() self.acquire = self._lock.acquire self.release = self._lock.release - register_after_fork(self, ForkAwareThreadLock.__init__) class ForkAwareLocal(threading.local): def __init__(self): diff --git a/Lib/pickle.py b/Lib/pickle.py --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -962,7 +962,7 @@ rep = self.readline()[:-1] for q in "\"'": # double or single quote if rep.startswith(q): - if not rep.endswith(q): + if len(rep) < 2 or not rep.endswith(q): raise ValueError, "insecure string pickle" rep = rep[len(q):-len(q)] break diff --git a/Lib/plistlib.py b/Lib/plistlib.py --- a/Lib/plistlib.py +++ b/Lib/plistlib.py @@ -262,8 +262,8 @@ def writeData(self, 
data): self.beginElement("data") self.indentLevel -= 1 - maxlinelength = 76 - len(self.indent.replace("\t", " " * 8) * - self.indentLevel) + maxlinelength = max(16, 76 - len(self.indent.replace("\t", " " * 8) * + self.indentLevel)) for line in data.asBase64(maxlinelength).split("\n"): if line: self.writeln(line) diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,7 +1,7 @@ -# Autogenerated by Sphinx on Sat Apr 6 09:55:30 2013 +# Autogenerated by Sphinx on Sat May 11 22:31:13 2013 topics = {'assert': '\nThe ``assert`` statement\n************************\n\nAssert statements are a convenient way to insert debugging assertions\ninto a program:\n\n assert_stmt ::= "assert" expression ["," expression]\n\nThe simple form, ``assert expression``, is equivalent to\n\n if __debug__:\n if not expression: raise AssertionError\n\nThe extended form, ``assert expression1, expression2``, is equivalent\nto\n\n if __debug__:\n if not expression1: raise AssertionError(expression2)\n\nThese equivalences assume that ``__debug__`` and ``AssertionError``\nrefer to the built-in variables with those names. In the current\nimplementation, the built-in variable ``__debug__`` is ``True`` under\nnormal circumstances, ``False`` when optimization is requested\n(command line option -O). The current code generator emits no code\nfor an assert statement when optimization is requested at compile\ntime. Note that it is unnecessary to include the source code for the\nexpression that failed in the error message; it will be displayed as\npart of the stack trace.\n\nAssignments to ``__debug__`` are illegal. The value for the built-in\nvariable is determined when the interpreter starts.\n', 'assignment': '\nAssignment statements\n*********************\n\nAssignment statements are used to (re)bind names to values and to\nmodify attributes or items of mutable objects:\n\n assignment_stmt ::= (target_list "=")+ (expression_list | yield_expression)\n target_list ::= target ("," target)* [","]\n target ::= identifier\n | "(" target_list ")"\n | "[" target_list "]"\n | attributeref\n | subscription\n | slicing\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn assignment statement evaluates the expression list (remember that\nthis can be a single expression or a comma-separated list, the latter\nyielding a tuple) and assigns the single resulting object to each of\nthe target lists, from left to right.\n\nAssignment is defined recursively depending on the form of the target\n(list). When a target is part of a mutable object (an attribute\nreference, subscription or slicing), the mutable object must\nultimately perform the assignment and decide about its validity, and\nmay raise an exception if the assignment is unacceptable. 
The rules\nobserved by various types and the exceptions raised are given with the\ndefinition of the object types (see section *The standard type\nhierarchy*).\n\nAssignment of an object to a target list is recursively defined as\nfollows.\n\n* If the target list is a single target: The object is assigned to\n that target.\n\n* If the target list is a comma-separated list of targets: The object\n must be an iterable with the same number of items as there are\n targets in the target list, and the items are assigned, from left to\n right, to the corresponding targets.\n\nAssignment of an object to a single target is recursively defined as\nfollows.\n\n* If the target is an identifier (name):\n\n * If the name does not occur in a ``global`` statement in the\n current code block: the name is bound to the object in the current\n local namespace.\n\n * Otherwise: the name is bound to the object in the current global\n namespace.\n\n The name is rebound if it was already bound. This may cause the\n reference count for the object previously bound to the name to reach\n zero, causing the object to be deallocated and its destructor (if it\n has one) to be called.\n\n* If the target is a target list enclosed in parentheses or in square\n brackets: The object must be an iterable with the same number of\n items as there are targets in the target list, and its items are\n assigned, from left to right, to the corresponding targets.\n\n* If the target is an attribute reference: The primary expression in\n the reference is evaluated. It should yield an object with\n assignable attributes; if this is not the case, ``TypeError`` is\n raised. That object is then asked to assign the assigned object to\n the given attribute; if it cannot perform the assignment, it raises\n an exception (usually but not necessarily ``AttributeError``).\n\n Note: If the object is a class instance and the attribute reference\n occurs on both sides of the assignment operator, the RHS expression,\n ``a.x`` can access either an instance attribute or (if no instance\n attribute exists) a class attribute. The LHS target ``a.x`` is\n always set as an instance attribute, creating it if necessary.\n Thus, the two occurrences of ``a.x`` do not necessarily refer to the\n same attribute: if the RHS expression refers to a class attribute,\n the LHS creates a new instance attribute as the target of the\n assignment:\n\n class Cls:\n x = 3 # class variable\n inst = Cls()\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\n\n This description does not necessarily apply to descriptor\n attributes, such as properties created with ``property()``.\n\n* If the target is a subscription: The primary expression in the\n reference is evaluated. It should yield either a mutable sequence\n object (such as a list) or a mapping object (such as a dictionary).\n Next, the subscript expression is evaluated.\n\n If the primary is a mutable sequence object (such as a list), the\n subscript must yield a plain integer. If it is negative, the\n sequence\'s length is added to it. The resulting value must be a\n nonnegative integer less than the sequence\'s length, and the\n sequence is asked to assign the assigned object to its item with\n that index. 
If the index is out of range, ``IndexError`` is raised\n (assignment to a subscripted sequence cannot add new items to a\n list).\n\n If the primary is a mapping object (such as a dictionary), the\n subscript must have a type compatible with the mapping\'s key type,\n and the mapping is then asked to create a key/datum pair which maps\n the subscript to the assigned object. This can either replace an\n existing key/value pair with the same key value, or insert a new\n key/value pair (if no key with the same value existed).\n\n* If the target is a slicing: The primary expression in the reference\n is evaluated. It should yield a mutable sequence object (such as a\n list). The assigned object should be a sequence object of the same\n type. Next, the lower and upper bound expressions are evaluated,\n insofar they are present; defaults are zero and the sequence\'s\n length. The bounds should evaluate to (small) integers. If either\n bound is negative, the sequence\'s length is added to it. The\n resulting bounds are clipped to lie between zero and the sequence\'s\n length, inclusive. Finally, the sequence object is asked to replace\n the slice with the items of the assigned sequence. The length of\n the slice may be different from the length of the assigned sequence,\n thus changing the length of the target sequence, if the object\n allows it.\n\n**CPython implementation detail:** In the current implementation, the\nsyntax for targets is taken to be the same as for expressions, and\ninvalid syntax is rejected during the code generation phase, causing\nless detailed error messages.\n\nWARNING: Although the definition of assignment implies that overlaps\nbetween the left-hand side and the right-hand side are \'safe\' (for\nexample ``a, b = b, a`` swaps two variables), overlaps *within* the\ncollection of assigned-to variables are not safe! For instance, the\nfollowing program prints ``[0, 2]``:\n\n x = [0, 1]\n i = 0\n i, x[i] = 1, 2\n print x\n\n\nAugmented assignment statements\n===============================\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like ``x += 1`` can be rewritten as\n``x = x + 1`` to achieve a similar, but not exactly equal effect. In\nthe augmented version, ``x`` is only evaluated once. Also, when\npossible, the actual operation is performed *in-place*, meaning that\nrather than creating a new object and assigning that to the target,\nthe old object is modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. 
Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n', - 'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a ``NameError`` exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name in front of the name, with leading underscores removed, and\na single underscore inserted in front of the class name. For example,\nthe identifier ``__spam`` occurring in a class named ``Ham`` will be\ntransformed to ``_Ham__spam``. This transformation is independent of\nthe syntactical context in which the identifier is used. If the\ntransformed name is extremely long (longer than 255 characters),\nimplementation defined truncation may happen. If the class name\nconsists only of underscores, no transformation is done.\n', + 'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a ``NameError`` exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name, with leading underscores removed and a single underscore\ninserted, in front of the name. For example, the identifier\n``__spam`` occurring in a class named ``Ham`` will be transformed to\n``_Ham__spam``. This transformation is independent of the syntactical\ncontext in which the identifier is used. If the transformed name is\nextremely long (longer than 255 characters), implementation defined\ntruncation may happen. If the class name consists only of underscores,\nno transformation is done.\n', 'atom-literals': "\nLiterals\n********\n\nPython supports string literals and various numeric literals:\n\n literal ::= stringliteral | integer | longinteger\n | floatnumber | imagnumber\n\nEvaluation of a literal yields an object of the given type (string,\ninteger, long integer, floating point number, complex number) with the\ngiven value. The value may be approximated in the case of floating\npoint and imaginary (complex) literals. See section *Literals* for\ndetails.\n\nAll literals correspond to immutable data types, and hence the\nobject's identity is less important than its value. 
Multiple\nevaluations of literals with the same value (either the same\noccurrence in the program text or a different occurrence) may obtain\nthe same object or a different object with the same value.\n", 'attribute-access': '\nCustomizing attribute access\n****************************\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n ``__getattribute__()`` method below for a way to actually get total\n control in new-style classes.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should not simply execute ``self.name = value`` --- this would\n cause a recursive call to itself. Instead, it should insert the\n value in the dictionary of instance attributes, e.g.,\n ``self.__dict__[name] = value``. For new-style classes, rather\n than accessing the instance dictionary, it should call the base\n class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\n\nMore attribute access for new-style classes\n===========================================\n\nThe following methods only apply to new-style classes.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. 
See *Special method lookup for new-style\n classes*.\n\n\nImplementing Descriptors\n========================\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n====================\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called. Note that descriptors are only invoked for new\nstyle objects or classes (ones that subclass ``object()`` or\n``type()``).\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to a new-style object instance, ``a.x`` is transformed\n into the call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a new-style class, ``A.x`` is transformed into the\n call: ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. 
If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n=========\n\nBy default, instances of both old and new-style classes have a\ndictionary for attribute storage. This wastes space for objects\nhaving very few instance variables. The space consumption can become\nacute when creating large numbers of instances.\n\nThe default can be overridden by defining *__slots__* in a new-style\nclass definition. The *__slots__* declaration takes a sequence of\ninstance variables and reserves just enough space in each instance to\nhold a value for each variable. Space is saved because *__dict__* is\nnot created for each instance.\n\n__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n new-style class, *__slots__* reserves space for the declared\n variables and prevents the automatic creation of *__dict__* and\n *__weakref__* for each instance.\n\n New in version 2.2.\n\nNotes on using *__slots__*\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises ``AttributeError``. If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n Changed in version 2.3: Previously, adding ``\'__dict__\'`` to the\n *__slots__* declaration would not enable the assignment of new\n attributes not specifically listed in the sequence of instance\n variable names.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n Changed in version 2.3: Previously, adding ``\'__weakref__\'`` to the\n *__slots__* declaration would not enable support for weak\n references.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. 
As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``long``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n Changed in version 2.6: Previously, *__class__* assignment raised an\n error if either new or old class had *__slots__*.\n', 'attribute-references': '\nAttribute references\n********************\n\nAn attribute reference is a primary followed by a period and a name:\n\n attributeref ::= primary "." identifier\n\nThe primary must evaluate to an object of a type that supports\nattribute references, e.g., a module, list, or an instance. This\nobject is then asked to produce the attribute whose name is the\nidentifier. If this attribute is not available, the exception\n``AttributeError`` is raised. Otherwise, the type and value of the\nobject produced is determined by the object. Multiple evaluations of\nthe same attribute reference may yield different objects.\n', @@ -33,14 +33,14 @@ 'exprlists': '\nExpression lists\n****************\n\n expression_list ::= expression ( "," expression )* [","]\n\nAn expression list containing at least one comma yields a tuple. The\nlength of the tuple is the number of expressions in the list. The\nexpressions are evaluated from left to right.\n\nThe trailing comma is required only to create a single tuple (a.k.a. a\n*singleton*); it is optional in all other cases. A single expression\nwithout a trailing comma doesn\'t create a tuple, but rather yields the\nvalue of that expression. (To create an empty tuple, use an empty pair\nof parentheses: ``()``.)\n', 'floating': '\nFloating point literals\n***********************\n\nFloating point literals are described by the following lexical\ndefinitions:\n\n floatnumber ::= pointfloat | exponentfloat\n pointfloat ::= [intpart] fraction | intpart "."\n exponentfloat ::= (intpart | pointfloat) exponent\n intpart ::= digit+\n fraction ::= "." digit+\n exponent ::= ("e" | "E") ["+" | "-"] digit+\n\nNote that the integer and exponent parts of floating point numbers can\nlook like octal integers, but are interpreted using radix 10. For\nexample, ``077e010`` is legal, and denotes the same number as\n``77e10``. The allowed range of floating point literals is\nimplementation-dependent. Some examples of floating point literals:\n\n 3.14 10. 
.001 1e100 3.14e-10 0e0\n\nNote that numeric literals do not include a sign; a phrase like ``-1``\nis actually an expression composed of the unary operator ``-`` and the\nliteral ``1``.\n', 'for': '\nThe ``for`` statement\n*********************\n\nThe ``for`` statement is used to iterate over the elements of a\nsequence (such as a string, tuple or list) or other iterable object:\n\n for_stmt ::= "for" target_list "in" expression_list ":" suite\n ["else" ":" suite]\n\nThe expression list is evaluated once; it should yield an iterable\nobject. An iterator is created for the result of the\n``expression_list``. The suite is then executed once for each item\nprovided by the iterator, in the order of ascending indices. Each\nitem in turn is assigned to the target list using the standard rules\nfor assignments, and then the suite is executed. When the items are\nexhausted (which is immediately when the sequence is empty), the suite\nin the ``else`` clause, if present, is executed, and the loop\nterminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ncontinues with the next item, or with the ``else`` clause if there was\nno next item.\n\nThe suite may assign to the variable(s) in the target list; this does\nnot affect the next item assigned to it.\n\nThe target list is not deleted when the loop is finished, but if the\nsequence is empty, it will not have been assigned to at all by the\nloop. Hint: the built-in function ``range()`` returns a sequence of\nintegers suitable to emulate the effect of Pascal\'s ``for i := a to b\ndo``; e.g., ``range(3)`` returns the list ``[0, 1, 2]``.\n\nNote: There is a subtlety when the sequence is being modified by the loop\n (this can only occur for mutable sequences, i.e. lists). An internal\n counter is used to keep track of which item is used next, and this\n is incremented on each iteration. When this counter has reached the\n length of the sequence the loop terminates. This means that if the\n suite deletes the current (or a previous) item from the sequence,\n the next item will be skipped (since it gets the index of the\n current item which has already been treated). Likewise, if the\n suite inserts an item in the sequence before the current item, the\n current item will be treated again the next time through the loop.\n This can lead to nasty bugs that can be avoided by making a\n temporary copy using a slice of the whole sequence, e.g.,\n\n for x in a[:]:\n if x < 0: a.remove(x)\n', - 'formatstrings': '\nFormat String Syntax\n********************\n\nThe ``str.format()`` method and the ``Formatter`` class share the same\nsyntax for format strings (although in the case of ``Formatter``,\nsubclasses can define their own format string syntax).\n\nFormat strings contain "replacement fields" surrounded by curly braces\n``{}``. Anything that is not contained in braces is considered literal\ntext, which is copied unchanged to the output. If you need to include\na brace character in the literal text, it can be escaped by doubling:\n``{{`` and ``}}``.\n\nThe grammar for a replacement field is as follows:\n\n replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}"\n field_name ::= arg_name ("." 
attribute_name | "[" element_index "]")*\n arg_name ::= [identifier | integer]\n attribute_name ::= identifier\n element_index ::= integer | index_string\n index_string ::= +\n conversion ::= "r" | "s"\n format_spec ::= \n\nIn less formal terms, the replacement field can start with a\n*field_name* that specifies the object whose value is to be formatted\nand inserted into the output instead of the replacement field. The\n*field_name* is optionally followed by a *conversion* field, which is\npreceded by an exclamation point ``\'!\'``, and a *format_spec*, which\nis preceded by a colon ``\':\'``. These specify a non-default format\nfor the replacement value.\n\nSee also the *Format Specification Mini-Language* section.\n\nThe *field_name* itself begins with an *arg_name* that is either a\nnumber or a keyword. If it\'s a number, it refers to a positional\nargument, and if it\'s a keyword, it refers to a named keyword\nargument. If the numerical arg_names in a format string are 0, 1, 2,\n... in sequence, they can all be omitted (not just some) and the\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\nBecause *arg_name* is not quote-delimited, it is not possible to\nspecify arbitrary dictionary keys (e.g., the strings ``\'10\'`` or\n``\':-]\'``) within a format string. The *arg_name* can be followed by\nany number of index or attribute expressions. An expression of the\nform ``\'.name\'`` selects the named attribute using ``getattr()``,\nwhile an expression of the form ``\'[index]\'`` does an index lookup\nusing ``__getitem__()``.\n\nChanged in version 2.7: The positional argument specifiers can be\nomitted, so ``\'{} {}\'`` is equivalent to ``\'{0} {1}\'``.\n\nSome simple format string examples:\n\n "First, thou shalt count to {0}" # References first positional argument\n "Bring me a {}" # Implicitly references the first positional argument\n "From {} to {}" # Same as "From {0} to {1}"\n "My quest is {name}" # References keyword argument \'name\'\n "Weight in tons {0.weight}" # \'weight\' attribute of first positional arg\n "Units destroyed: {players[0]}" # First element of keyword argument \'players\'.\n\nThe *conversion* field causes a type coercion before formatting.\nNormally, the job of formatting a value is done by the\n``__format__()`` method of the value itself. However, in some cases\nit is desirable to force a type to be formatted as a string,\noverriding its own definition of formatting. By converting the value\nto a string before calling ``__format__()``, the normal formatting\nlogic is bypassed.\n\nTwo conversion flags are currently supported: ``\'!s\'`` which calls\n``str()`` on the value, and ``\'!r\'`` which calls ``repr()``.\n\nSome examples:\n\n "Harold\'s a clever {0!s}" # Calls str() on the argument first\n "Bring out the holy {name!r}" # Calls repr() on the argument first\n\nThe *format_spec* field contains a specification of how the value\nshould be presented, including such details as field width, alignment,\npadding, decimal precision and so on. Each value type can define its\nown "formatting mini-language" or interpretation of the *format_spec*.\n\nMost built-in types support a common formatting mini-language, which\nis described in the next section.\n\nA *format_spec* field can also include nested replacement fields\nwithin it. These nested replacement fields can contain only a field\nname; conversion flags and format specifications are not allowed. 
The\nreplacement fields within the format_spec are substituted before the\n*format_spec* string is interpreted. This allows the formatting of a\nvalue to be dynamically specified.\n\nSee the *Format examples* section for some examples.\n\n\nFormat Specification Mini-Language\n==================================\n\n"Format specifications" are used within replacement fields contained\nwithin a format string to define how individual values are presented\n(see *Format String Syntax*). They can also be passed directly to the\nbuilt-in ``format()`` function. Each formattable type may define how\nthe format specification is to be interpreted.\n\nMost built-in types implement the following options for format\nspecifications, although some of the formatting options are only\nsupported by the numeric types.\n\nA general convention is that an empty format string (``""``) produces\nthe same result as if you had called ``str()`` on the value. A non-\nempty format string typically modifies the result.\n\nThe general form of a *standard format specifier* is:\n\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\n fill ::= \n align ::= "<" | ">" | "=" | "^"\n sign ::= "+" | "-" | " "\n width ::= integer\n precision ::= integer\n type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n\nThe *fill* character can be any character other than \'{\' or \'}\'. The\npresence of a fill character is signaled by the character following\nit, which must be one of the alignment options. If the second\ncharacter of *format_spec* is not a valid alignment option, then it is\nassumed that both the fill character and the alignment option are\nabsent.\n\nThe meaning of the various alignment options is as follows:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'<\'`` | Forces the field to be left-aligned within the available |\n | | space (this is the default for most objects). |\n +-----------+------------------------------------------------------------+\n | ``\'>\'`` | Forces the field to be right-aligned within the available |\n | | space (this is the default for numbers). |\n +-----------+------------------------------------------------------------+\n | ``\'=\'`` | Forces the padding to be placed after the sign (if any) |\n | | but before the digits. This is used for printing fields |\n | | in the form \'+000000120\'. This alignment option is only |\n | | valid for numeric types. |\n +-----------+------------------------------------------------------------+\n | ``\'^\'`` | Forces the field to be centered within the available |\n | | space. |\n +-----------+------------------------------------------------------------+\n\nNote that unless a minimum field width is defined, the field width\nwill always be the same size as the data to fill it, so that the\nalignment option has no meaning in this case.\n\nThe *sign* option is only valid for number types, and can be one of\nthe following:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'+\'`` | indicates that a sign should be used for both positive as |\n | | well as negative numbers. 
|\n +-----------+------------------------------------------------------------+\n | ``\'-\'`` | indicates that a sign should be used only for negative |\n | | numbers (this is the default behavior). |\n +-----------+------------------------------------------------------------+\n | space | indicates that a leading space should be used on positive |\n | | numbers, and a minus sign on negative numbers. |\n +-----------+------------------------------------------------------------+\n\nThe ``\'#\'`` option is only valid for integers, and only for binary,\noctal, or hexadecimal output. If present, it specifies that the\noutput will be prefixed by ``\'0b\'``, ``\'0o\'``, or ``\'0x\'``,\nrespectively.\n\nThe ``\',\'`` option signals the use of a comma for a thousands\nseparator. For a locale aware separator, use the ``\'n\'`` integer\npresentation type instead.\n\nChanged in version 2.7: Added the ``\',\'`` option (see also **PEP\n378**).\n\n*width* is a decimal integer defining the minimum field width. If not\nspecified, then the field width will be determined by the content.\n\nPreceding the *width* field by a zero (``\'0\'``) character enables\nsign-aware zero-padding for numeric types. This is equivalent to a\n*fill* character of ``\'0\'`` with an *alignment* type of ``\'=\'``.\n\nThe *precision* is a decimal number indicating how many digits should\nbe displayed after the decimal point for a floating point value\nformatted with ``\'f\'`` and ``\'F\'``, or before and after the decimal\npoint for a floating point value formatted with ``\'g\'`` or ``\'G\'``.\nFor non-number types the field indicates the maximum field size - in\nother words, how many characters will be used from the field content.\nThe *precision* is not allowed for integer values.\n\nFinally, the *type* determines how the data should be presented.\n\nThe available string presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'s\'`` | String format. This is the default type for strings and |\n | | may be omitted. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'s\'``. |\n +-----------+------------------------------------------------------------+\n\nThe available integer presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'b\'`` | Binary format. Outputs the number in base 2. |\n +-----------+------------------------------------------------------------+\n | ``\'c\'`` | Character. Converts the integer to the corresponding |\n | | unicode character before printing. |\n +-----------+------------------------------------------------------------+\n | ``\'d\'`` | Decimal Integer. Outputs the number in base 10. |\n +-----------+------------------------------------------------------------+\n | ``\'o\'`` | Octal format. Outputs the number in base 8. |\n +-----------+------------------------------------------------------------+\n | ``\'x\'`` | Hex format. Outputs the number in base 16, using lower- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'X\'`` | Hex format. Outputs the number in base 16, using upper- |\n | | case letters for the digits above 9. 
|\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'d\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'d\'``. |\n +-----------+------------------------------------------------------------+\n\nIn addition to the above presentation types, integers can be formatted\nwith the floating point presentation types listed below (except\n``\'n\'`` and None). When doing so, ``float()`` is used to convert the\ninteger to a floating point number before formatting.\n\nThe available presentation types for floating point and decimal values\nare:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'e\'`` | Exponent notation. Prints the number in scientific |\n | | notation using the letter \'e\' to indicate the exponent. |\n +-----------+------------------------------------------------------------+\n | ``\'E\'`` | Exponent notation. Same as ``\'e\'`` except it uses an upper |\n | | case \'E\' as the separator character. |\n +-----------+------------------------------------------------------------+\n | ``\'f\'`` | Fixed point. Displays the number as a fixed-point number. |\n +-----------+------------------------------------------------------------+\n | ``\'F\'`` | Fixed point. Same as ``\'f\'``. |\n +-----------+------------------------------------------------------------+\n | ``\'g\'`` | General format. For a given precision ``p >= 1``, this |\n | | rounds the number to ``p`` significant digits and then |\n | | formats the result in either fixed-point format or in |\n | | scientific notation, depending on its magnitude. The |\n | | precise rules are as follows: suppose that the result |\n | | formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1`` would have exponent ``exp``. Then if ``-4 <= exp |\n | | < p``, the number is formatted with presentation type |\n | | ``\'f\'`` and precision ``p-1-exp``. Otherwise, the number |\n | | is formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1``. In both cases insignificant trailing zeros are |\n | | removed from the significand, and the decimal point is |\n | | also removed if there are no remaining digits following |\n | | it. Positive and negative infinity, positive and negative |\n | | zero, and nans, are formatted as ``inf``, ``-inf``, ``0``, |\n | | ``-0`` and ``nan`` respectively, regardless of the |\n | | precision. A precision of ``0`` is treated as equivalent |\n | | to a precision of ``1``. |\n +-----------+------------------------------------------------------------+\n | ``\'G\'`` | General format. Same as ``\'g\'`` except switches to ``\'E\'`` |\n | | if the number gets too large. The representations of |\n | | infinity and NaN are uppercased, too. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'g\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | ``\'%\'`` | Percentage. Multiplies the number by 100 and displays in |\n | | fixed (``\'f\'``) format, followed by a percent sign. 
|\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'g\'``. |\n +-----------+------------------------------------------------------------+\n\n\nFormat examples\n===============\n\nThis section contains examples of the new format syntax and comparison\nwith the old ``%``-formatting.\n\nIn most of the cases the syntax is similar to the old\n``%``-formatting, with the addition of the ``{}`` and with ``:`` used\ninstead of ``%``. For example, ``\'%03.2f\'`` can be translated to\n``\'{:03.2f}\'``.\n\nThe new format syntax also supports new and different options, shown\nin the follow examples.\n\nAccessing arguments by position:\n\n >>> \'{0}, {1}, {2}\'.format(\'a\', \'b\', \'c\')\n \'a, b, c\'\n >>> \'{}, {}, {}\'.format(\'a\', \'b\', \'c\') # 2.7+ only\n \'a, b, c\'\n >>> \'{2}, {1}, {0}\'.format(\'a\', \'b\', \'c\')\n \'c, b, a\'\n >>> \'{2}, {1}, {0}\'.format(*\'abc\') # unpacking argument sequence\n \'c, b, a\'\n >>> \'{0}{1}{0}\'.format(\'abra\', \'cad\') # arguments\' indices can be repeated\n \'abracadabra\'\n\nAccessing arguments by name:\n\n >>> \'Coordinates: {latitude}, {longitude}\'.format(latitude=\'37.24N\', longitude=\'-115.81W\')\n \'Coordinates: 37.24N, -115.81W\'\n >>> coord = {\'latitude\': \'37.24N\', \'longitude\': \'-115.81W\'}\n >>> \'Coordinates: {latitude}, {longitude}\'.format(**coord)\n \'Coordinates: 37.24N, -115.81W\'\n\nAccessing arguments\' attributes:\n\n >>> c = 3-5j\n >>> (\'The complex number {0} is formed from the real part {0.real} \'\n ... \'and the imaginary part {0.imag}.\').format(c)\n \'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\'\n >>> class Point(object):\n ... def __init__(self, x, y):\n ... self.x, self.y = x, y\n ... def __str__(self):\n ... 
return \'Point({self.x}, {self.y})\'.format(self=self)\n ...\n >>> str(Point(4, 2))\n \'Point(4, 2)\'\n\nAccessing arguments\' items:\n\n >>> coord = (3, 5)\n >>> \'X: {0[0]}; Y: {0[1]}\'.format(coord)\n \'X: 3; Y: 5\'\n\nReplacing ``%s`` and ``%r``:\n\n >>> "repr() shows quotes: {!r}; str() doesn\'t: {!s}".format(\'test1\', \'test2\')\n "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n\nAligning the text and specifying a width:\n\n >>> \'{:<30}\'.format(\'left aligned\')\n \'left aligned \'\n >>> \'{:>30}\'.format(\'right aligned\')\n \' right aligned\'\n >>> \'{:^30}\'.format(\'centered\')\n \' centered \'\n >>> \'{:*^30}\'.format(\'centered\') # use \'*\' as a fill char\n \'***********centered***********\'\n\nReplacing ``%+f``, ``%-f``, and ``% f`` and specifying a sign:\n\n >>> \'{:+f}; {:+f}\'.format(3.14, -3.14) # show it always\n \'+3.140000; -3.140000\'\n >>> \'{: f}; {: f}\'.format(3.14, -3.14) # show a space for positive numbers\n \' 3.140000; -3.140000\'\n >>> \'{:-f}; {:-f}\'.format(3.14, -3.14) # show only the minus -- same as \'{:f}; {:f}\'\n \'3.140000; -3.140000\'\n\nReplacing ``%x`` and ``%o`` and converting the value to different\nbases:\n\n >>> # format also supports binary numbers\n >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42)\n \'int: 42; hex: 2a; oct: 52; bin: 101010\'\n >>> # with 0x, 0o, or 0b as prefix:\n >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42)\n \'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\'\n\nUsing the comma as a thousands separator:\n\n >>> \'{:,}\'.format(1234567890)\n \'1,234,567,890\'\n\nExpressing a percentage:\n\n >>> points = 19.5\n >>> total = 22\n >>> \'Correct answers: {:.2%}\'.format(points/total)\n \'Correct answers: 88.64%\'\n\nUsing type-specific formatting:\n\n >>> import datetime\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n >>> \'{:%Y-%m-%d %H:%M:%S}\'.format(d)\n \'2010-07-04 12:15:58\'\n\nNesting arguments and more complex examples:\n\n >>> for align, text in zip(\'<^>\', [\'left\', \'center\', \'right\']):\n ... \'{0:{fill}{align}16}\'.format(text, fill=align, align=align)\n ...\n \'left<<<<<<<<<<<<\'\n \'^^^^^center^^^^^\'\n \'>>>>>>>>>>>right\'\n >>>\n >>> octets = [192, 168, 0, 1]\n >>> \'{:02X}{:02X}{:02X}{:02X}\'.format(*octets)\n \'C0A80001\'\n >>> int(_, 16)\n 3232235521\n >>>\n >>> width = 5\n >>> for num in range(5,12):\n ... for base in \'dXob\':\n ... print \'{0:{width}{base}}\'.format(num, base=base, width=width),\n ... print\n ...\n 5 5 5 101\n 6 6 6 110\n 7 7 7 111\n 8 8 10 1000\n 9 9 11 1001\n 10 A 12 1010\n 11 B 13 1011\n', + 'formatstrings': '\nFormat String Syntax\n********************\n\nThe ``str.format()`` method and the ``Formatter`` class share the same\nsyntax for format strings (although in the case of ``Formatter``,\nsubclasses can define their own format string syntax).\n\nFormat strings contain "replacement fields" surrounded by curly braces\n``{}``. Anything that is not contained in braces is considered literal\ntext, which is copied unchanged to the output. If you need to include\na brace character in the literal text, it can be escaped by doubling:\n``{{`` and ``}}``.\n\nThe grammar for a replacement field is as follows:\n\n replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}"\n field_name ::= arg_name ("." 
attribute_name | "[" element_index "]")*\n arg_name ::= [identifier | integer]\n attribute_name ::= identifier\n element_index ::= integer | index_string\n index_string ::= +\n conversion ::= "r" | "s"\n format_spec ::= \n\nIn less formal terms, the replacement field can start with a\n*field_name* that specifies the object whose value is to be formatted\nand inserted into the output instead of the replacement field. The\n*field_name* is optionally followed by a *conversion* field, which is\npreceded by an exclamation point ``\'!\'``, and a *format_spec*, which\nis preceded by a colon ``\':\'``. These specify a non-default format\nfor the replacement value.\n\nSee also the *Format Specification Mini-Language* section.\n\nThe *field_name* itself begins with an *arg_name* that is either a\nnumber or a keyword. If it\'s a number, it refers to a positional\nargument, and if it\'s a keyword, it refers to a named keyword\nargument. If the numerical arg_names in a format string are 0, 1, 2,\n... in sequence, they can all be omitted (not just some) and the\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\nBecause *arg_name* is not quote-delimited, it is not possible to\nspecify arbitrary dictionary keys (e.g., the strings ``\'10\'`` or\n``\':-]\'``) within a format string. The *arg_name* can be followed by\nany number of index or attribute expressions. An expression of the\nform ``\'.name\'`` selects the named attribute using ``getattr()``,\nwhile an expression of the form ``\'[index]\'`` does an index lookup\nusing ``__getitem__()``.\n\nChanged in version 2.7: The positional argument specifiers can be\nomitted, so ``\'{} {}\'`` is equivalent to ``\'{0} {1}\'``.\n\nSome simple format string examples:\n\n "First, thou shalt count to {0}" # References first positional argument\n "Bring me a {}" # Implicitly references the first positional argument\n "From {} to {}" # Same as "From {0} to {1}"\n "My quest is {name}" # References keyword argument \'name\'\n "Weight in tons {0.weight}" # \'weight\' attribute of first positional arg\n "Units destroyed: {players[0]}" # First element of keyword argument \'players\'.\n\nThe *conversion* field causes a type coercion before formatting.\nNormally, the job of formatting a value is done by the\n``__format__()`` method of the value itself. However, in some cases\nit is desirable to force a type to be formatted as a string,\noverriding its own definition of formatting. By converting the value\nto a string before calling ``__format__()``, the normal formatting\nlogic is bypassed.\n\nTwo conversion flags are currently supported: ``\'!s\'`` which calls\n``str()`` on the value, and ``\'!r\'`` which calls ``repr()``.\n\nSome examples:\n\n "Harold\'s a clever {0!s}" # Calls str() on the argument first\n "Bring out the holy {name!r}" # Calls repr() on the argument first\n\nThe *format_spec* field contains a specification of how the value\nshould be presented, including such details as field width, alignment,\npadding, decimal precision and so on. Each value type can define its\nown "formatting mini-language" or interpretation of the *format_spec*.\n\nMost built-in types support a common formatting mini-language, which\nis described in the next section.\n\nA *format_spec* field can also include nested replacement fields\nwithin it. These nested replacement fields can contain only a field\nname; conversion flags and format specifications are not allowed. 
The\nreplacement fields within the format_spec are substituted before the\n*format_spec* string is interpreted. This allows the formatting of a\nvalue to be dynamically specified.\n\nSee the *Format examples* section for some examples.\n\n\nFormat Specification Mini-Language\n==================================\n\n"Format specifications" are used within replacement fields contained\nwithin a format string to define how individual values are presented\n(see *Format String Syntax*). They can also be passed directly to the\nbuilt-in ``format()`` function. Each formattable type may define how\nthe format specification is to be interpreted.\n\nMost built-in types implement the following options for format\nspecifications, although some of the formatting options are only\nsupported by the numeric types.\n\nA general convention is that an empty format string (``""``) produces\nthe same result as if you had called ``str()`` on the value. A non-\nempty format string typically modifies the result.\n\nThe general form of a *standard format specifier* is:\n\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\n fill ::= \n align ::= "<" | ">" | "=" | "^"\n sign ::= "+" | "-" | " "\n width ::= integer\n precision ::= integer\n type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n\nThe *fill* character can be any character other than \'{\' or \'}\'. The\npresence of a fill character is signaled by the character following\nit, which must be one of the alignment options. If the second\ncharacter of *format_spec* is not a valid alignment option, then it is\nassumed that both the fill character and the alignment option are\nabsent.\n\nThe meaning of the various alignment options is as follows:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'<\'`` | Forces the field to be left-aligned within the available |\n | | space (this is the default for most objects). |\n +-----------+------------------------------------------------------------+\n | ``\'>\'`` | Forces the field to be right-aligned within the available |\n | | space (this is the default for numbers). |\n +-----------+------------------------------------------------------------+\n | ``\'=\'`` | Forces the padding to be placed after the sign (if any) |\n | | but before the digits. This is used for printing fields |\n | | in the form \'+000000120\'. This alignment option is only |\n | | valid for numeric types. |\n +-----------+------------------------------------------------------------+\n | ``\'^\'`` | Forces the field to be centered within the available |\n | | space. |\n +-----------+------------------------------------------------------------+\n\nNote that unless a minimum field width is defined, the field width\nwill always be the same size as the data to fill it, so that the\nalignment option has no meaning in this case.\n\nThe *sign* option is only valid for number types, and can be one of\nthe following:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | ``\'+\'`` | indicates that a sign should be used for both positive as |\n | | well as negative numbers. 
|\n +-----------+------------------------------------------------------------+\n | ``\'-\'`` | indicates that a sign should be used only for negative |\n | | numbers (this is the default behavior). |\n +-----------+------------------------------------------------------------+\n | space | indicates that a leading space should be used on positive |\n | | numbers, and a minus sign on negative numbers. |\n +-----------+------------------------------------------------------------+\n\nThe ``\'#\'`` option is only valid for integers, and only for binary,\noctal, or hexadecimal output. If present, it specifies that the\noutput will be prefixed by ``\'0b\'``, ``\'0o\'``, or ``\'0x\'``,\nrespectively.\n\nThe ``\',\'`` option signals the use of a comma for a thousands\nseparator. For a locale aware separator, use the ``\'n\'`` integer\npresentation type instead.\n\nChanged in version 2.7: Added the ``\',\'`` option (see also **PEP\n378**).\n\n*width* is a decimal integer defining the minimum field width. If not\nspecified, then the field width will be determined by the content.\n\nPreceding the *width* field by a zero (``\'0\'``) character enables\nsign-aware zero-padding for numeric types. This is equivalent to a\n*fill* character of ``\'0\'`` with an *alignment* type of ``\'=\'``.\n\nThe *precision* is a decimal number indicating how many digits should\nbe displayed after the decimal point for a floating point value\nformatted with ``\'f\'`` and ``\'F\'``, or before and after the decimal\npoint for a floating point value formatted with ``\'g\'`` or ``\'G\'``.\nFor non-number types the field indicates the maximum field size - in\nother words, how many characters will be used from the field content.\nThe *precision* is not allowed for integer values.\n\nFinally, the *type* determines how the data should be presented.\n\nThe available string presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'s\'`` | String format. This is the default type for strings and |\n | | may be omitted. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'s\'``. |\n +-----------+------------------------------------------------------------+\n\nThe available integer presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'b\'`` | Binary format. Outputs the number in base 2. |\n +-----------+------------------------------------------------------------+\n | ``\'c\'`` | Character. Converts the integer to the corresponding |\n | | unicode character before printing. |\n +-----------+------------------------------------------------------------+\n | ``\'d\'`` | Decimal Integer. Outputs the number in base 10. |\n +-----------+------------------------------------------------------------+\n | ``\'o\'`` | Octal format. Outputs the number in base 8. |\n +-----------+------------------------------------------------------------+\n | ``\'x\'`` | Hex format. Outputs the number in base 16, using lower- |\n | | case letters for the digits above 9. |\n +-----------+------------------------------------------------------------+\n | ``\'X\'`` | Hex format. Outputs the number in base 16, using upper- |\n | | case letters for the digits above 9. 
|\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'d\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'d\'``. |\n +-----------+------------------------------------------------------------+\n\nIn addition to the above presentation types, integers can be formatted\nwith the floating point presentation types listed below (except\n``\'n\'`` and None). When doing so, ``float()`` is used to convert the\ninteger to a floating point number before formatting.\n\nThe available presentation types for floating point and decimal values\nare:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | ``\'e\'`` | Exponent notation. Prints the number in scientific |\n | | notation using the letter \'e\' to indicate the exponent. |\n | | The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'E\'`` | Exponent notation. Same as ``\'e\'`` except it uses an upper |\n | | case \'E\' as the separator character. |\n +-----------+------------------------------------------------------------+\n | ``\'f\'`` | Fixed point. Displays the number as a fixed-point number. |\n | | The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'F\'`` | Fixed point. Same as ``\'f\'``. |\n +-----------+------------------------------------------------------------+\n | ``\'g\'`` | General format. For a given precision ``p >= 1``, this |\n | | rounds the number to ``p`` significant digits and then |\n | | formats the result in either fixed-point format or in |\n | | scientific notation, depending on its magnitude. The |\n | | precise rules are as follows: suppose that the result |\n | | formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1`` would have exponent ``exp``. Then if ``-4 <= exp |\n | | < p``, the number is formatted with presentation type |\n | | ``\'f\'`` and precision ``p-1-exp``. Otherwise, the number |\n | | is formatted with presentation type ``\'e\'`` and precision |\n | | ``p-1``. In both cases insignificant trailing zeros are |\n | | removed from the significand, and the decimal point is |\n | | also removed if there are no remaining digits following |\n | | it. Positive and negative infinity, positive and negative |\n | | zero, and nans, are formatted as ``inf``, ``-inf``, ``0``, |\n | | ``-0`` and ``nan`` respectively, regardless of the |\n | | precision. A precision of ``0`` is treated as equivalent |\n | | to a precision of ``1``. The default precision is ``6``. |\n +-----------+------------------------------------------------------------+\n | ``\'G\'`` | General format. Same as ``\'g\'`` except switches to ``\'E\'`` |\n | | if the number gets too large. The representations of |\n | | infinity and NaN are uppercased, too. |\n +-----------+------------------------------------------------------------+\n | ``\'n\'`` | Number. This is the same as ``\'g\'``, except that it uses |\n | | the current locale setting to insert the appropriate |\n | | number separator characters. |\n +-----------+------------------------------------------------------------+\n | ``\'%\'`` | Percentage. 
Multiplies the number by 100 and displays in |\n | | fixed (``\'f\'``) format, followed by a percent sign. |\n +-----------+------------------------------------------------------------+\n | None | The same as ``\'g\'``. |\n +-----------+------------------------------------------------------------+\n\n\nFormat examples\n===============\n\nThis section contains examples of the new format syntax and comparison\nwith the old ``%``-formatting.\n\nIn most of the cases the syntax is similar to the old\n``%``-formatting, with the addition of the ``{}`` and with ``:`` used\ninstead of ``%``. For example, ``\'%03.2f\'`` can be translated to\n``\'{:03.2f}\'``.\n\nThe new format syntax also supports new and different options, shown\nin the follow examples.\n\nAccessing arguments by position:\n\n >>> \'{0}, {1}, {2}\'.format(\'a\', \'b\', \'c\')\n \'a, b, c\'\n >>> \'{}, {}, {}\'.format(\'a\', \'b\', \'c\') # 2.7+ only\n \'a, b, c\'\n >>> \'{2}, {1}, {0}\'.format(\'a\', \'b\', \'c\')\n \'c, b, a\'\n >>> \'{2}, {1}, {0}\'.format(*\'abc\') # unpacking argument sequence\n \'c, b, a\'\n >>> \'{0}{1}{0}\'.format(\'abra\', \'cad\') # arguments\' indices can be repeated\n \'abracadabra\'\n\nAccessing arguments by name:\n\n >>> \'Coordinates: {latitude}, {longitude}\'.format(latitude=\'37.24N\', longitude=\'-115.81W\')\n \'Coordinates: 37.24N, -115.81W\'\n >>> coord = {\'latitude\': \'37.24N\', \'longitude\': \'-115.81W\'}\n >>> \'Coordinates: {latitude}, {longitude}\'.format(**coord)\n \'Coordinates: 37.24N, -115.81W\'\n\nAccessing arguments\' attributes:\n\n >>> c = 3-5j\n >>> (\'The complex number {0} is formed from the real part {0.real} \'\n ... \'and the imaginary part {0.imag}.\').format(c)\n \'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\'\n >>> class Point(object):\n ... def __init__(self, x, y):\n ... self.x, self.y = x, y\n ... def __str__(self):\n ... 
return \'Point({self.x}, {self.y})\'.format(self=self)\n ...\n >>> str(Point(4, 2))\n \'Point(4, 2)\'\n\nAccessing arguments\' items:\n\n >>> coord = (3, 5)\n >>> \'X: {0[0]}; Y: {0[1]}\'.format(coord)\n \'X: 3; Y: 5\'\n\nReplacing ``%s`` and ``%r``:\n\n >>> "repr() shows quotes: {!r}; str() doesn\'t: {!s}".format(\'test1\', \'test2\')\n "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n\nAligning the text and specifying a width:\n\n >>> \'{:<30}\'.format(\'left aligned\')\n \'left aligned \'\n >>> \'{:>30}\'.format(\'right aligned\')\n \' right aligned\'\n >>> \'{:^30}\'.format(\'centered\')\n \' centered \'\n >>> \'{:*^30}\'.format(\'centered\') # use \'*\' as a fill char\n \'***********centered***********\'\n\nReplacing ``%+f``, ``%-f``, and ``% f`` and specifying a sign:\n\n >>> \'{:+f}; {:+f}\'.format(3.14, -3.14) # show it always\n \'+3.140000; -3.140000\'\n >>> \'{: f}; {: f}\'.format(3.14, -3.14) # show a space for positive numbers\n \' 3.140000; -3.140000\'\n >>> \'{:-f}; {:-f}\'.format(3.14, -3.14) # show only the minus -- same as \'{:f}; {:f}\'\n \'3.140000; -3.140000\'\n\nReplacing ``%x`` and ``%o`` and converting the value to different\nbases:\n\n >>> # format also supports binary numbers\n >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42)\n \'int: 42; hex: 2a; oct: 52; bin: 101010\'\n >>> # with 0x, 0o, or 0b as prefix:\n >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42)\n \'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\'\n\nUsing the comma as a thousands separator:\n\n >>> \'{:,}\'.format(1234567890)\n \'1,234,567,890\'\n\nExpressing a percentage:\n\n >>> points = 19.5\n >>> total = 22\n >>> \'Correct answers: {:.2%}\'.format(points/total)\n \'Correct answers: 88.64%\'\n\nUsing type-specific formatting:\n\n >>> import datetime\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n >>> \'{:%Y-%m-%d %H:%M:%S}\'.format(d)\n \'2010-07-04 12:15:58\'\n\nNesting arguments and more complex examples:\n\n >>> for align, text in zip(\'<^>\', [\'left\', \'center\', \'right\']):\n ... \'{0:{fill}{align}16}\'.format(text, fill=align, align=align)\n ...\n \'left<<<<<<<<<<<<\'\n \'^^^^^center^^^^^\'\n \'>>>>>>>>>>>right\'\n >>>\n >>> octets = [192, 168, 0, 1]\n >>> \'{:02X}{:02X}{:02X}{:02X}\'.format(*octets)\n \'C0A80001\'\n >>> int(_, 16)\n 3232235521\n >>>\n >>> width = 5\n >>> for num in range(5,12):\n ... for base in \'dXob\':\n ... print \'{0:{width}{base}}\'.format(num, base=base, width=width),\n ... print\n ...\n 5 5 5 101\n 6 6 6 110\n 7 7 7 111\n 8 8 10 1000\n 9 9 11 1001\n 10 A 12 1010\n 11 B 13 1011\n', 'function': '\nFunction definitions\n********************\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n decorated ::= decorators (classdef | funcdef)\n decorators ::= decorator+\n decorator ::= "@" dotted_name ["(" [argument_list [","]] ")"] NEWLINE\n funcdef ::= "def" funcname "(" [parameter_list] ")" ":" suite\n dotted_name ::= identifier ("." identifier)*\n parameter_list ::= (defparameter ",")*\n ( "*" identifier ["," "**" identifier]\n | "**" identifier\n | defparameter [","] )\n defparameter ::= parameter ["=" expression]\n sublist ::= parameter ("," parameter)* [","]\n parameter ::= identifier | "(" sublist ")"\n funcname ::= identifier\n\nA function definition is an executable statement. Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function). 
This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition. The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object. Multiple decorators are applied in\nnested fashion. For example, the following code:\n\n @f1(arg)\n @f2\n def func(): pass\n\nis equivalent to:\n\n def func(): pass\n func = f1(arg)(f2(func))\n\nWhen one or more top-level *parameters* have the form *parameter*\n``=`` *expression*, the function is said to have "default parameter\nvalues." For a parameter with a default value, the corresponding\n*argument* may be omitted from a call, in which case the parameter\'s\ndefault value is substituted. If a parameter has a default value, all\nfollowing parameters must also have a default value --- this is a\nsyntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated when the function definition\nis executed.** This means that the expression is evaluated once, when\nthe function is defined, and that the same "pre-computed" value is\nused for each call. This is especially important to understand when a\ndefault parameter is a mutable object, such as a list or a dictionary:\nif the function modifies the object (e.g. by appending an item to a\nlist), the default value is in effect modified. This is generally not\nwhat was intended. A way around this is to use ``None`` as the\ndefault, and explicitly test for it in the body of the function, e.g.:\n\n def whats_on_the_telly(penguin=None):\n if penguin is None:\n penguin = []\n penguin.append("property of the zoo")\n return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values. If the form\n"``*identifier``" is present, it is initialized to a tuple receiving\nany excess positional parameters, defaulting to the empty tuple. If\nthe form "``**identifier``" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions. This uses lambda forms,\ndescribed in section *Lambdas*. Note that the lambda form is merely a\nshorthand for a simplified function definition; a function defined in\na "``def``" statement can be passed around or assigned to another name\njust like a function defined by a lambda form. The "``def``" form is\nactually more powerful since it allows the execution of multiple\nstatements.\n\n**Programmer\'s note:** Functions are first-class objects. A "``def``"\nform executed inside a function definition defines a local function\nthat can be returned or passed around. Free variables used in the\nnested function can access the local variables of the function\ncontaining the def. 
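A minimal interactive sketch of the nested-function behaviour described above (the names ``make_adder`` and ``add3`` are invented for illustration, not taken from the documentation being diffed):

   >>> def make_adder(n):
   ...     # 'n' is a local variable of make_adder; the nested function
   ...     # below refers to it as a free variable.
   ...     def add(x):
   ...         return x + n
   ...     return add
   ...
   >>> add3 = make_adder(3)    # the local function object is returned
   >>> add3(10)
   13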
See section *Naming and binding* for details.\n', 'global': '\nThe ``global`` statement\n************************\n\n global_stmt ::= "global" identifier ("," identifier)*\n\nThe ``global`` statement is a declaration which holds for the entire\ncurrent code block. It means that the listed identifiers are to be\ninterpreted as globals. It would be impossible to assign to a global\nvariable without ``global``, although free variables may refer to\nglobals without being declared global.\n\nNames listed in a ``global`` statement must not be used in the same\ncode block textually preceding that ``global`` statement.\n\nNames listed in a ``global`` statement must not be defined as formal\nparameters or in a ``for`` loop control target, ``class`` definition,\nfunction definition, or ``import`` statement.\n\n**CPython implementation detail:** The current implementation does not\nenforce the latter two restrictions, but programs should not abuse\nthis freedom, as future implementations may enforce them or silently\nchange the meaning of the program.\n\n**Programmer\'s note:** the ``global`` is a directive to the parser.\nIt applies only to code parsed at the same time as the ``global``\nstatement. In particular, a ``global`` statement contained in an\n``exec`` statement does not affect the code block *containing* the\n``exec`` statement, and code contained in an ``exec`` statement is\nunaffected by ``global`` statements in the code containing the\n``exec`` statement. The same applies to the ``eval()``,\n``execfile()`` and ``compile()`` functions.\n', 'id-classes': '\nReserved classes of identifiers\n*******************************\n\nCertain classes of identifiers (besides keywords) have special\nmeanings. These classes are identified by the patterns of leading and\ntrailing underscore characters:\n\n``_*``\n Not imported by ``from module import *``. The special identifier\n ``_`` is used in the interactive interpreter to store the result of\n the last evaluation; it is stored in the ``__builtin__`` module.\n When not in interactive mode, ``_`` has no special meaning and is\n not defined. See section *The import statement*.\n\n Note: The name ``_`` is often used in conjunction with\n internationalization; refer to the documentation for the\n ``gettext`` module for more information on this convention.\n\n``__*__``\n System-defined names. These names are defined by the interpreter\n and its implementation (including the standard library). Current\n system names are discussed in the *Special method names* section\n and elsewhere. More will likely be defined in future versions of\n Python. *Any* use of ``__*__`` names, in any context, that does\n not follow explicitly documented use, is subject to breakage\n without warning.\n\n``__*``\n Class-private names. Names in this category, when used within the\n context of a class definition, are re-written to use a mangled form\n to help avoid name clashes between "private" attributes of base and\n derived classes. See section *Identifiers (Names)*.\n', 'identifiers': '\nIdentifiers and keywords\n************************\n\nIdentifiers (also referred to as *names*) are described by the\nfollowing lexical definitions:\n\n identifier ::= (letter|"_") (letter | digit | "_")*\n letter ::= lowercase | uppercase\n lowercase ::= "a"..."z"\n uppercase ::= "A"..."Z"\n digit ::= "0"..."9"\n\nIdentifiers are unlimited in length. 
Case is significant.\n\n\nKeywords\n========\n\nThe following identifiers are used as reserved words, or *keywords* of\nthe language, and cannot be used as ordinary identifiers. They must\nbe spelled exactly as written here:\n\n and del from not while\n as elif global or with\n assert else if pass yield\n break except import print\n class exec in raise\n continue finally is return\n def for lambda try\n\nChanged in version 2.4: ``None`` became a constant and is now\nrecognized by the compiler as a name for the built-in object ``None``.\nAlthough it is not a keyword, you cannot assign a different object to\nit.\n\nChanged in version 2.5: Using ``as`` and ``with`` as identifiers\ntriggers a warning. To use them as keywords, enable the\n``with_statement`` future feature .\n\nChanged in version 2.6: ``as`` and ``with`` are full keywords.\n\n\nReserved classes of identifiers\n===============================\n\nCertain classes of identifiers (besides keywords) have special\nmeanings. These classes are identified by the patterns of leading and\ntrailing underscore characters:\n\n``_*``\n Not imported by ``from module import *``. The special identifier\n ``_`` is used in the interactive interpreter to store the result of\n the last evaluation; it is stored in the ``__builtin__`` module.\n When not in interactive mode, ``_`` has no special meaning and is\n not defined. See section *The import statement*.\n\n Note: The name ``_`` is often used in conjunction with\n internationalization; refer to the documentation for the\n ``gettext`` module for more information on this convention.\n\n``__*__``\n System-defined names. These names are defined by the interpreter\n and its implementation (including the standard library). Current\n system names are discussed in the *Special method names* section\n and elsewhere. More will likely be defined in future versions of\n Python. *Any* use of ``__*__`` names, in any context, that does\n not follow explicitly documented use, is subject to breakage\n without warning.\n\n``__*``\n Class-private names. Names in this category, when used within the\n context of a class definition, are re-written to use a mangled form\n to help avoid name clashes between "private" attributes of base and\n derived classes. See section *Identifiers (Names)*.\n', 'if': '\nThe ``if`` statement\n********************\n\nThe ``if`` statement is used for conditional execution:\n\n if_stmt ::= "if" expression ":" suite\n ( "elif" expression ":" suite )*\n ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the ``if`` statement is executed or evaluated).\nIf all expressions are false, the suite of the ``else`` clause, if\npresent, is executed.\n', 'imaginary': '\nImaginary literals\n******************\n\nImaginary literals are described by the following lexical definitions:\n\n imagnumber ::= (floatnumber | intpart) ("j" | "J")\n\nAn imaginary literal yields a complex number with a real part of 0.0.\nComplex numbers are represented as a pair of floating point numbers\nand have the same restrictions on their range. To create a complex\nnumber with a nonzero real part, add a floating point number to it,\ne.g., ``(3+4j)``. 
Some examples of imaginary literals:\n\n 3.14j 10.j 10j .001j 1e100j 3.14e-10j\n', - 'import': '\nThe ``import`` statement\n************************\n\n import_stmt ::= "import" module ["as" name] ( "," module ["as" name] )*\n | "from" relative_module "import" identifier ["as" name]\n ( "," identifier ["as" name] )*\n | "from" relative_module "import" "(" identifier ["as" name]\n ( "," identifier ["as" name] )* [","] ")"\n | "from" module "import" "*"\n module ::= (identifier ".")* identifier\n relative_module ::= "."* module | "."+\n name ::= identifier\n\nImport statements are executed in two steps: (1) find a module, and\ninitialize it if necessary; (2) define a name or names in the local\nnamespace (of the scope where the ``import`` statement occurs). The\nstatement comes in two forms differing on whether it uses the ``from``\nkeyword. The first form (without ``from``) repeats these steps for\neach identifier in the list. The form with ``from`` performs step (1)\nonce, and then performs step (2) repeatedly.\n\nTo understand how step (1) occurs, one must first understand how\nPython handles hierarchical naming of modules. To help organize\nmodules and provide a hierarchy in naming, Python has a concept of\npackages. A package can contain other packages and modules while\nmodules cannot contain other modules or packages. From a file system\nperspective, packages are directories and modules are files. The\noriginal specification for packages is still available to read,\nalthough minor details have changed since the writing of that\ndocument.\n\nOnce the name of the module is known (unless otherwise specified, the\nterm "module" will refer to both packages and modules), searching for\nthe module or package can begin. The first place checked is\n``sys.modules``, the cache of all modules that have been imported\npreviously. If the module is found there then it is used in step (2)\nof import.\n\nIf the module is not found in the cache, then ``sys.meta_path`` is\nsearched (the specification for ``sys.meta_path`` can be found in\n**PEP 302**). The object is a list of *finder* objects which are\nqueried in order as to whether they know how to load the module by\ncalling their ``find_module()`` method with the name of the module. If\nthe module happens to be contained within a package (as denoted by the\nexistence of a dot in the name), then a second argument to\n``find_module()`` is given as the value of the ``__path__`` attribute\nfrom the parent package (everything up to the last dot in the name of\nthe module being imported). If a finder can find the module it returns\na *loader* (discussed later) or returns ``None``.\n\nIf none of the finders on ``sys.meta_path`` are able to find the\nmodule then some implicitly defined finders are queried.\nImplementations of Python vary in what implicit meta path finders are\ndefined. The one they all do define, though, is one that handles\n``sys.path_hooks``, ``sys.path_importer_cache``, and ``sys.path``.\n\nThe implicit finder searches for the requested module in the "paths"\nspecified in one of two places ("paths" do not have to be file system\npaths). If the module being imported is supposed to be contained\nwithin a package then the second argument passed to ``find_module()``,\n``__path__`` on the parent package, is used as the source of paths. If\nthe module is not contained in a package then ``sys.path`` is used as\nthe source of paths.\n\nOnce the source of paths is chosen it is iterated over to find a\nfinder that can handle that path. 
The dict at\n``sys.path_importer_cache`` caches finders for paths and is checked\nfor a finder. If the path does not have a finder cached then\n``sys.path_hooks`` is searched by calling each object in the list with\na single argument of the path, returning a finder or raises\n``ImportError``. If a finder is returned then it is cached in\n``sys.path_importer_cache`` and then used for that path entry. If no\nfinder can be found but the path exists then a value of ``None`` is\nstored in ``sys.path_importer_cache`` to signify that an implicit,\nfile-based finder that handles modules stored as individual files\nshould be used for that path. If the path does not exist then a finder\nwhich always returns *None`* is placed in the cache for the path.\n\nIf no finder can find the module then ``ImportError`` is raised.\nOtherwise some finder returned a loader whose ``load_module()`` method\nis called with the name of the module to load (see **PEP 302** for the\noriginal definition of loaders). A loader has several responsibilities\nto perform on a module it loads. First, if the module already exists\nin ``sys.modules`` (a possibility if the loader is called outside of\nthe import machinery) then it is to use that module for initialization\nand not a new module. But if the module does not exist in\n``sys.modules`` then it is to be added to that dict before\ninitialization begins. If an error occurs during loading of the module\nand it was added to ``sys.modules`` it is to be removed from the dict.\nIf an error occurs but the module was already in ``sys.modules`` it is\nleft in the dict.\n\nThe loader must set several attributes on the module. ``__name__`` is\nto be set to the name of the module. ``__file__`` is to be the "path"\nto the file unless the module is built-in (and thus listed in\n``sys.builtin_module_names``) in which case the attribute is not set.\nIf what is being imported is a package then ``__path__`` is to be set\nto a list of paths to be searched when looking for modules and\npackages contained within the package being imported. ``__package__``\nis optional but should be set to the name of package that contains the\nmodule or package (the empty string is used for module not contained\nin a package). ``__loader__`` is also optional but should be set to\nthe loader object that is loading the module.\n\nIf an error occurs during loading then the loader raises\n``ImportError`` if some other exception is not already being\npropagated. Otherwise the loader returns the module that was loaded\nand initialized.\n\nWhen step (1) finishes without raising an exception, step (2) can\nbegin.\n\nThe first form of ``import`` statement binds the module name in the\nlocal namespace to the module object, and then goes on to import the\nnext identifier, if any. If the module name is followed by ``as``,\nthe name following ``as`` is used as the local name for the module.\n\nThe ``from`` form does not bind the module name: it goes through the\nlist of identifiers, looks each one of them up in the module found in\nstep (1), and binds the name in the local namespace to the object thus\nfound. As with the first form of ``import``, an alternate local name\ncan be supplied by specifying "``as`` localname". If a name is not\nfound, ``ImportError`` is raised. 
If the list of identifiers is\nreplaced by a star (``\'*\'``), all public names defined in the module\nare bound in the local namespace of the ``import`` statement..\n\nThe *public names* defined by a module are determined by checking the\nmodule\'s namespace for a variable named ``__all__``; if defined, it\nmust be a sequence of strings which are names defined or imported by\nthat module. The names given in ``__all__`` are all considered public\nand are required to exist. If ``__all__`` is not defined, the set of\npublic names includes all names found in the module\'s namespace which\ndo not begin with an underscore character (``\'_\'``). ``__all__``\nshould contain the entire public API. It is intended to avoid\naccidentally exporting items that are not part of the API (such as\nlibrary modules which were imported and used within the module).\n\nThe ``from`` form with ``*`` may only occur in a module scope. If the\nwild card form of import --- ``import *`` --- is used in a function\nand the function contains or is a nested block with free variables,\nthe compiler will raise a ``SyntaxError``.\n\nWhen specifying what module to import you do not have to specify the\nabsolute name of the module. When a module or package is contained\nwithin another package it is possible to make a relative import within\nthe same top package without having to mention the package name. By\nusing leading dots in the specified module or package after ``from``\nyou can specify how high to traverse up the current package hierarchy\nwithout specifying exact names. One leading dot means the current\npackage where the module making the import exists. Two dots means up\none package level. Three dots is up two levels, etc. So if you execute\n``from . import mod`` from a module in the ``pkg`` package then you\nwill end up importing ``pkg.mod``. If you execute ``from ..subpkg2\nimport mod`` from within ``pkg.subpkg1`` you will import\n``pkg.subpkg2.mod``. The specification for relative imports is\ncontained within **PEP 328**.\n\n``importlib.import_module()`` is provided to support applications that\ndetermine which modules need to be loaded dynamically.\n\n\nFuture statements\n=================\n\nA *future statement* is a directive to the compiler that a particular\nmodule should be compiled using syntax or semantics that will be\navailable in a specified future release of Python. The future\nstatement is intended to ease migration to future versions of Python\nthat introduce incompatible changes to the language. It allows use of\nthe new features on a per-module basis before the release in which the\nfeature becomes standard.\n\n future_statement ::= "from" "__future__" "import" feature ["as" name]\n ("," feature ["as" name])*\n | "from" "__future__" "import" "(" feature ["as" name]\n ("," feature ["as" name])* [","] ")"\n feature ::= identifier\n name ::= identifier\n\nA future statement must appear near the top of the module. The only\nlines that can appear before a future statement are:\n\n* the module docstring (if any),\n\n* comments,\n\n* blank lines, and\n\n* other future statements.\n\nThe features recognized by Python 2.6 are ``unicode_literals``,\n``print_function``, ``absolute_import``, ``division``, ``generators``,\n``nested_scopes`` and ``with_statement``. 
``generators``,\n``with_statement``, ``nested_scopes`` are redundant in Python version\n2.6 and above because they are always enabled.\n\nA future statement is recognized and treated specially at compile\ntime: Changes to the semantics of core constructs are often\nimplemented by generating different code. It may even be the case\nthat a new feature introduces new incompatible syntax (such as a new\nreserved word), in which case the compiler may need to parse the\nmodule differently. Such decisions cannot be pushed off until\nruntime.\n\nFor any given release, the compiler knows which feature names have\nbeen defined, and raises a compile-time error if a future statement\ncontains a feature not known to it.\n\nThe direct runtime semantics are the same as for any import statement:\nthere is a standard module ``__future__``, described later, and it\nwill be imported in the usual way at the time the future statement is\nexecuted.\n\nThe interesting runtime semantics depend on the specific feature\nenabled by the future statement.\n\nNote that there is nothing special about the statement:\n\n import __future__ [as name]\n\nThat is not a future statement; it\'s an ordinary import statement with\nno special semantics or syntax restrictions.\n\nCode compiled by an ``exec`` statement or calls to the built-in\nfunctions ``compile()`` and ``execfile()`` that occur in a module\n``M`` containing a future statement will, by default, use the new\nsyntax or semantics associated with the future statement. This can,\nstarting with Python 2.2 be controlled by optional arguments to\n``compile()`` --- see the documentation of that function for details.\n\nA future statement typed at an interactive interpreter prompt will\ntake effect for the rest of the interpreter session. If an\ninterpreter is started with the *-i* option, is passed a script name\nto execute, and the script includes a future statement, it will be in\neffect in the interactive session started after the script is\nexecuted.\n\nSee also:\n\n **PEP 236** - Back to the __future__\n The original proposal for the __future__ mechanism.\n', + 'import': '\nThe ``import`` statement\n************************\n\n import_stmt ::= "import" module ["as" name] ( "," module ["as" name] )*\n | "from" relative_module "import" identifier ["as" name]\n ( "," identifier ["as" name] )*\n | "from" relative_module "import" "(" identifier ["as" name]\n ( "," identifier ["as" name] )* [","] ")"\n | "from" module "import" "*"\n module ::= (identifier ".")* identifier\n relative_module ::= "."* module | "."+\n name ::= identifier\n\nImport statements are executed in two steps: (1) find a module, and\ninitialize it if necessary; (2) define a name or names in the local\nnamespace (of the scope where the ``import`` statement occurs). The\nstatement comes in two forms differing on whether it uses the ``from``\nkeyword. The first form (without ``from``) repeats these steps for\neach identifier in the list. The form with ``from`` performs step (1)\nonce, and then performs step (2) repeatedly.\n\nTo understand how step (1) occurs, one must first understand how\nPython handles hierarchical naming of modules. To help organize\nmodules and provide a hierarchy in naming, Python has a concept of\npackages. A package can contain other packages and modules while\nmodules cannot contain other modules or packages. From a file system\nperspective, packages are directories and modules are files. 
The\noriginal specification for packages is still available to read,\nalthough minor details have changed since the writing of that\ndocument.\n\nOnce the name of the module is known (unless otherwise specified, the\nterm "module" will refer to both packages and modules), searching for\nthe module or package can begin. The first place checked is\n``sys.modules``, the cache of all modules that have been imported\npreviously. If the module is found there then it is used in step (2)\nof import.\n\nIf the module is not found in the cache, then ``sys.meta_path`` is\nsearched (the specification for ``sys.meta_path`` can be found in\n**PEP 302**). The object is a list of *finder* objects which are\nqueried in order as to whether they know how to load the module by\ncalling their ``find_module()`` method with the name of the module. If\nthe module happens to be contained within a package (as denoted by the\nexistence of a dot in the name), then a second argument to\n``find_module()`` is given as the value of the ``__path__`` attribute\nfrom the parent package (everything up to the last dot in the name of\nthe module being imported). If a finder can find the module it returns\na *loader* (discussed later) or returns ``None``.\n\nIf none of the finders on ``sys.meta_path`` are able to find the\nmodule then some implicitly defined finders are queried.\nImplementations of Python vary in what implicit meta path finders are\ndefined. The one they all do define, though, is one that handles\n``sys.path_hooks``, ``sys.path_importer_cache``, and ``sys.path``.\n\nThe implicit finder searches for the requested module in the "paths"\nspecified in one of two places ("paths" do not have to be file system\npaths). If the module being imported is supposed to be contained\nwithin a package then the second argument passed to ``find_module()``,\n``__path__`` on the parent package, is used as the source of paths. If\nthe module is not contained in a package then ``sys.path`` is used as\nthe source of paths.\n\nOnce the source of paths is chosen it is iterated over to find a\nfinder that can handle that path. The dict at\n``sys.path_importer_cache`` caches finders for paths and is checked\nfor a finder. If the path does not have a finder cached then\n``sys.path_hooks`` is searched by calling each object in the list with\na single argument of the path, returning a finder or raises\n``ImportError``. If a finder is returned then it is cached in\n``sys.path_importer_cache`` and then used for that path entry. If no\nfinder can be found but the path exists then a value of ``None`` is\nstored in ``sys.path_importer_cache`` to signify that an implicit,\nfile-based finder that handles modules stored as individual files\nshould be used for that path. If the path does not exist then a finder\nwhich always returns ``None`` is placed in the cache for the path.\n\nIf no finder can find the module then ``ImportError`` is raised.\nOtherwise some finder returned a loader whose ``load_module()`` method\nis called with the name of the module to load (see **PEP 302** for the\noriginal definition of loaders). A loader has several responsibilities\nto perform on a module it loads. First, if the module already exists\nin ``sys.modules`` (a possibility if the loader is called outside of\nthe import machinery) then it is to use that module for initialization\nand not a new module. But if the module does not exist in\n``sys.modules`` then it is to be added to that dict before\ninitialization begins. 
If an error occurs during loading of the module\nand it was added to ``sys.modules`` it is to be removed from the dict.\nIf an error occurs but the module was already in ``sys.modules`` it is\nleft in the dict.\n\nThe loader must set several attributes on the module. ``__name__`` is\nto be set to the name of the module. ``__file__`` is to be the "path"\nto the file unless the module is built-in (and thus listed in\n``sys.builtin_module_names``) in which case the attribute is not set.\nIf what is being imported is a package then ``__path__`` is to be set\nto a list of paths to be searched when looking for modules and\npackages contained within the package being imported. ``__package__``\nis optional but should be set to the name of package that contains the\nmodule or package (the empty string is used for module not contained\nin a package). ``__loader__`` is also optional but should be set to\nthe loader object that is loading the module.\n\nIf an error occurs during loading then the loader raises\n``ImportError`` if some other exception is not already being\npropagated. Otherwise the loader returns the module that was loaded\nand initialized.\n\nWhen step (1) finishes without raising an exception, step (2) can\nbegin.\n\nThe first form of ``import`` statement binds the module name in the\nlocal namespace to the module object, and then goes on to import the\nnext identifier, if any. If the module name is followed by ``as``,\nthe name following ``as`` is used as the local name for the module.\n\nThe ``from`` form does not bind the module name: it goes through the\nlist of identifiers, looks each one of them up in the module found in\nstep (1), and binds the name in the local namespace to the object thus\nfound. As with the first form of ``import``, an alternate local name\ncan be supplied by specifying "``as`` localname". If a name is not\nfound, ``ImportError`` is raised. If the list of identifiers is\nreplaced by a star (``\'*\'``), all public names defined in the module\nare bound in the local namespace of the ``import`` statement..\n\nThe *public names* defined by a module are determined by checking the\nmodule\'s namespace for a variable named ``__all__``; if defined, it\nmust be a sequence of strings which are names defined or imported by\nthat module. The names given in ``__all__`` are all considered public\nand are required to exist. If ``__all__`` is not defined, the set of\npublic names includes all names found in the module\'s namespace which\ndo not begin with an underscore character (``\'_\'``). ``__all__``\nshould contain the entire public API. It is intended to avoid\naccidentally exporting items that are not part of the API (such as\nlibrary modules which were imported and used within the module).\n\nThe ``from`` form with ``*`` may only occur in a module scope. If the\nwild card form of import --- ``import *`` --- is used in a function\nand the function contains or is a nested block with free variables,\nthe compiler will raise a ``SyntaxError``.\n\nWhen specifying what module to import you do not have to specify the\nabsolute name of the module. When a module or package is contained\nwithin another package it is possible to make a relative import within\nthe same top package without having to mention the package name. By\nusing leading dots in the specified module or package after ``from``\nyou can specify how high to traverse up the current package hierarchy\nwithout specifying exact names. 
One leading dot means the current\npackage where the module making the import exists. Two dots means up\none package level. Three dots is up two levels, etc. So if you execute\n``from . import mod`` from a module in the ``pkg`` package then you\nwill end up importing ``pkg.mod``. If you execute ``from ..subpkg2\nimport mod`` from within ``pkg.subpkg1`` you will import\n``pkg.subpkg2.mod``. The specification for relative imports is\ncontained within **PEP 328**.\n\n``importlib.import_module()`` is provided to support applications that\ndetermine which modules need to be loaded dynamically.\n\n\nFuture statements\n=================\n\nA *future statement* is a directive to the compiler that a particular\nmodule should be compiled using syntax or semantics that will be\navailable in a specified future release of Python. The future\nstatement is intended to ease migration to future versions of Python\nthat introduce incompatible changes to the language. It allows use of\nthe new features on a per-module basis before the release in which the\nfeature becomes standard.\n\n future_statement ::= "from" "__future__" "import" feature ["as" name]\n ("," feature ["as" name])*\n | "from" "__future__" "import" "(" feature ["as" name]\n ("," feature ["as" name])* [","] ")"\n feature ::= identifier\n name ::= identifier\n\nA future statement must appear near the top of the module. The only\nlines that can appear before a future statement are:\n\n* the module docstring (if any),\n\n* comments,\n\n* blank lines, and\n\n* other future statements.\n\nThe features recognized by Python 2.6 are ``unicode_literals``,\n``print_function``, ``absolute_import``, ``division``, ``generators``,\n``nested_scopes`` and ``with_statement``. ``generators``,\n``with_statement``, ``nested_scopes`` are redundant in Python version\n2.6 and above because they are always enabled.\n\nA future statement is recognized and treated specially at compile\ntime: Changes to the semantics of core constructs are often\nimplemented by generating different code. It may even be the case\nthat a new feature introduces new incompatible syntax (such as a new\nreserved word), in which case the compiler may need to parse the\nmodule differently. Such decisions cannot be pushed off until\nruntime.\n\nFor any given release, the compiler knows which feature names have\nbeen defined, and raises a compile-time error if a future statement\ncontains a feature not known to it.\n\nThe direct runtime semantics are the same as for any import statement:\nthere is a standard module ``__future__``, described later, and it\nwill be imported in the usual way at the time the future statement is\nexecuted.\n\nThe interesting runtime semantics depend on the specific feature\nenabled by the future statement.\n\nNote that there is nothing special about the statement:\n\n import __future__ [as name]\n\nThat is not a future statement; it\'s an ordinary import statement with\nno special semantics or syntax restrictions.\n\nCode compiled by an ``exec`` statement or calls to the built-in\nfunctions ``compile()`` and ``execfile()`` that occur in a module\n``M`` containing a future statement will, by default, use the new\nsyntax or semantics associated with the future statement. This can,\nstarting with Python 2.2 be controlled by optional arguments to\n``compile()`` --- see the documentation of that function for details.\n\nA future statement typed at an interactive interpreter prompt will\ntake effect for the rest of the interpreter session. 
If an\ninterpreter is started with the *-i* option, is passed a script name\nto execute, and the script includes a future statement, it will be in\neffect in the interactive session started after the script is\nexecuted.\n\nSee also:\n\n **PEP 236** - Back to the __future__\n The original proposal for the __future__ mechanism.\n', 'in': '\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation. Also unlike C, expressions like ``a < b < c`` have the\ninterpretation that is conventional in mathematics:\n\n comparison ::= or_expr ( comp_operator or_expr )*\n comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "<>" | "!="\n | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: ``True`` or ``False``.\n\nComparisons can be chained arbitrarily, e.g., ``x < y <= z`` is\nequivalent to ``x < y and y <= z``, except that ``y`` is evaluated\nonly once (but in both cases ``z`` is not evaluated at all when ``x <\ny`` is found to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then ``a op1 b op2 c ... y\nopN z`` is equivalent to ``a op1 b and b op2 c and ... y opN z``,\nexcept that each expression is evaluated at most once.\n\nNote that ``a op1 b op2 c`` doesn\'t imply any kind of comparison\nbetween *a* and *c*, so that, e.g., ``x < y > z`` is perfectly legal\n(though perhaps not pretty).\n\nThe forms ``<>`` and ``!=`` are equivalent; for consistency with C,\n``!=`` is preferred; where ``!=`` is mentioned below ``<>`` is also\naccepted. The ``<>`` spelling is considered obsolescent.\n\nThe operators ``<``, ``>``, ``==``, ``>=``, ``<=``, and ``!=`` compare\nthe values of two objects. The objects need not have the same type.\nIf both are numbers, they are converted to a common type. Otherwise,\nobjects of different types *always* compare unequal, and are ordered\nconsistently but arbitrarily. You can control comparison behavior of\nobjects of non-built-in types by defining a ``__cmp__`` method or rich\ncomparison methods like ``__gt__``, described in section *Special\nmethod names*.\n\n(This unusual definition of comparison was used to simplify the\ndefinition of operations like sorting and the ``in`` and ``not in``\noperators. In the future, the comparison rules for objects of\ndifferent types are likely to change.)\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* Strings are compared lexicographically using the numeric equivalents\n (the result of the built-in function ``ord()``) of their characters.\n Unicode and 8-bit strings are fully interoperable in this behavior.\n [4]\n\n* Tuples and lists are compared lexicographically using comparison of\n corresponding elements. This means that to compare equal, each\n element must compare equal and the two sequences must be of the same\n type and have the same length.\n\n If not equal, the sequences are ordered the same as their first\n differing elements. For example, ``cmp([1,2,x], [1,2,y])`` returns\n the same as ``cmp(x,y)``. If the corresponding element does not\n exist, the shorter sequence is ordered first (for example, ``[1,2] <\n [1,2,3]``).\n\n* Mappings (dictionaries) compare equal if and only if their sorted\n (key, value) lists compare equal. [5] Outcomes other than equality\n are resolved consistently, but are not otherwise defined. 
[6]\n\n* Most other objects of built-in types compare unequal unless they are\n the same object; the choice whether one object is considered smaller\n or larger than another one is made arbitrarily but consistently\n within one execution of a program.\n\nThe operators ``in`` and ``not in`` test for collection membership.\n``x in s`` evaluates to true if *x* is a member of the collection *s*,\nand false otherwise. ``x not in s`` returns the negation of ``x in\ns``. The collection membership test has traditionally been bound to\nsequences; an object is a member of a collection if the collection is\na sequence and contains an element equal to that object. However, it\nmake sense for many other object types to support membership tests\nwithout being a sequence. In particular, dictionaries (for keys) and\nsets support membership testing.\n\nFor the list and tuple types, ``x in y`` is true if and only if there\nexists an index *i* such that ``x == y[i]`` is true.\n\nFor the Unicode and string types, ``x in y`` is true if and only if\n*x* is a substring of *y*. An equivalent test is ``y.find(x) != -1``.\nNote, *x* and *y* need not be the same type; consequently, ``u\'ab\' in\n\'abc\'`` will return ``True``. Empty strings are always considered to\nbe a substring of any other string, so ``"" in "abc"`` will return\n``True``.\n\nChanged in version 2.3: Previously, *x* was required to be a string of\nlength ``1``.\n\nFor user-defined classes which define the ``__contains__()`` method,\n``x in y`` is true if and only if ``y.__contains__(x)`` is true.\n\nFor user-defined classes which do not define ``__contains__()`` but do\ndefine ``__iter__()``, ``x in y`` is true if some value ``z`` with ``x\n== z`` is produced while iterating over ``y``. If an exception is\nraised during the iteration, it is as if ``in`` raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n``__getitem__()``, ``x in y`` is true if and only if there is a non-\nnegative integer index *i* such that ``x == y[i]``, and all lower\ninteger indices do not raise ``IndexError`` exception. (If any other\nexception is raised, it is as if ``in`` raised that exception).\n\nThe operator ``not in`` is defined to have the inverse true value of\n``in``.\n\nThe operators ``is`` and ``is not`` test for object identity: ``x is\ny`` is true if and only if *x* and *y* are the same object. ``x is\nnot y`` yields the inverse truth value. [7]\n', 'integers': '\nInteger and long integer literals\n*********************************\n\nInteger and long integer literals are described by the following\nlexical definitions:\n\n longinteger ::= integer ("l" | "L")\n integer ::= decimalinteger | octinteger | hexinteger | bininteger\n decimalinteger ::= nonzerodigit digit* | "0"\n octinteger ::= "0" ("o" | "O") octdigit+ | "0" octdigit+\n hexinteger ::= "0" ("x" | "X") hexdigit+\n bininteger ::= "0" ("b" | "B") bindigit+\n nonzerodigit ::= "1"..."9"\n octdigit ::= "0"..."7"\n bindigit ::= "0" | "1"\n hexdigit ::= digit | "a"..."f" | "A"..."F"\n\nAlthough both lower case ``\'l\'`` and upper case ``\'L\'`` are allowed as\nsuffix for long integers, it is strongly recommended to always use\n``\'L\'``, since the letter ``\'l\'`` looks too much like the digit\n``\'1\'``.\n\nPlain integer literals that are above the largest representable plain\ninteger (e.g., 2147483647 when using 32-bit arithmetic) are accepted\nas if they were long integers instead. 
[1] There is no limit for long\ninteger literals apart from what can be stored in available memory.\n\nSome examples of plain integer literals (first row) and long integer\nliterals (second and third rows):\n\n 7 2147483647 0177\n 3L 79228162514264337593543950336L 0377L 0x100000000L\n 79228162514264337593543950336 0xdeadbeef\n', 'lambda': '\nLambdas\n*******\n\n lambda_form ::= "lambda" [parameter_list]: expression\n old_lambda_form ::= "lambda" [parameter_list]: old_expression\n\nLambda forms (lambda expressions) have the same syntactic position as\nexpressions. They are a shorthand to create anonymous functions; the\nexpression ``lambda arguments: expression`` yields a function object.\nThe unnamed object behaves like a function object defined with\n\n def name(arguments):\n return expression\n\nSee section *Function definitions* for the syntax of parameter lists.\nNote that functions created with lambda forms cannot contain\nstatements.\n', @@ -49,7 +49,7 @@ 'numbers': "\nNumeric literals\n****************\n\nThere are four types of numeric literals: plain integers, long\nintegers, floating point numbers, and imaginary numbers. There are no\ncomplex literals (complex numbers can be formed by adding a real\nnumber and an imaginary number).\n\nNote that numeric literals do not include a sign; a phrase like ``-1``\nis actually an expression composed of the unary operator '``-``' and\nthe literal ``1``.\n", 'numeric-types': '\nEmulating numeric types\n***********************\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``//``, ``%``, ``divmod()``,\n ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``, ``|``). For\n instance, to evaluate the expression ``x + y``, where *x* is an\n instance of a class that has an ``__add__()`` method,\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\n should not be related to ``__truediv__()`` (described below). Note\n that ``__pow__()`` should be defined to accept an optional third\n argument if the ternary version of the built-in ``pow()`` function\n is to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return ``NotImplemented``.\n\nobject.__div__(self, other)\nobject.__truediv__(self, other)\n\n The division operator (``/``) is implemented by these methods. The\n ``__truediv__()`` method is used when ``__future__.division`` is in\n effect, otherwise ``__div__()`` is used. 
If only one of these two\n methods is defined, the object will not support division in the\n alternate context; ``TypeError`` will be raised instead.\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rdiv__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``%``, ``divmod()``,\n ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``, ``|``) with\n reflected (swapped) operands. These functions are only called if\n the left operand does not support the corresponding operation and\n the operands are of different types. [2] For instance, to evaluate\n the expression ``x - y``, where *y* is an instance of a class that\n has an ``__rsub__()`` method, ``y.__rsub__(x)`` is called if\n ``x.__sub__(y)`` returns *NotImplemented*.\n\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\n (the coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left operand\'s\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand\'s\n non-reflected method. This behavior allows subclasses to\n override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__idiv__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\n should attempt to do the operation in-place (modifying *self*) and\n return the result (which could be, but does not have to be,\n *self*). If a specific method is not defined, the augmented\n assignment falls back to the normal methods. For instance, to\n execute the statement ``x += y``, where *x* is an instance of a\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\n called. If *x* is an instance of a class that does not define a\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\n considered, as with the evaluation of ``x + y``.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations (``-``, ``+``,\n ``abs()`` and ``~``).\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__long__(self)\nobject.__float__(self)\n\n Called to implement the built-in functions ``complex()``,\n ``int()``, ``long()``, and ``float()``. Should return a value of\n the appropriate type.\n\nobject.__oct__(self)\nobject.__hex__(self)\n\n Called to implement the built-in functions ``oct()`` and ``hex()``.\n Should return a string value.\n\nobject.__index__(self)\n\n Called to implement ``operator.index()``. Also called whenever\n Python needs an integer object (such as in slicing). 
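[Editorial illustration: the ``__index__()`` hook just described lets an object stand in wherever Python needs an integer index, such as in slicing. A minimal Python 2 sketch with a hypothetical ``Nibble`` class.]

    class Nibble(object):
        def __init__(self, n):
            self.n = n
        def __index__(self):
            return self.n                  # used wherever an integer index is required

    data = range(10)
    print data[Nibble(2)]                  # 2
    print data[Nibble(3):Nibble(7)]        # [3, 4, 5, 6]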
Must return\n an integer (int or long).\n\n New in version 2.5.\n\nobject.__coerce__(self, other)\n\n Called to implement "mixed-mode" numeric arithmetic. Should either\n return a 2-tuple containing *self* and *other* converted to a\n common numeric type, or ``None`` if conversion is impossible. When\n the common type would be the type of ``other``, it is sufficient to\n return ``None``, since the interpreter will also ask the other\n object to attempt a coercion (but sometimes, if the implementation\n of the other type cannot be changed, it is useful to do the\n conversion to the other type here). A return value of\n ``NotImplemented`` is equivalent to returning ``None``.\n', 'objects': '\nObjects, values and types\n*************************\n\n*Objects* are Python\'s abstraction for data. All data in a Python\nprogram is represented by objects or by relations between objects. (In\na sense, and in conformance to Von Neumann\'s model of a "stored\nprogram computer," code is also represented by objects.)\n\nEvery object has an identity, a type and a value. An object\'s\n*identity* never changes once it has been created; you may think of it\nas the object\'s address in memory. The \'``is``\' operator compares the\nidentity of two objects; the ``id()`` function returns an integer\nrepresenting its identity (currently implemented as its address). An\nobject\'s *type* is also unchangeable. [1] An object\'s type determines\nthe operations that the object supports (e.g., "does it have a\nlength?") and also defines the possible values for objects of that\ntype. The ``type()`` function returns an object\'s type (which is an\nobject itself). The *value* of some objects can change. Objects\nwhose value can change are said to be *mutable*; objects whose value\nis unchangeable once they are created are called *immutable*. (The\nvalue of an immutable container object that contains a reference to a\nmutable object can change when the latter\'s value is changed; however\nthe container is still considered immutable, because the collection of\nobjects it contains cannot be changed. So, immutability is not\nstrictly the same as having an unchangeable value, it is more subtle.)\nAn object\'s mutability is determined by its type; for instance,\nnumbers, strings and tuples are immutable, while dictionaries and\nlists are mutable.\n\nObjects are never explicitly destroyed; however, when they become\nunreachable they may be garbage-collected. An implementation is\nallowed to postpone garbage collection or omit it altogether --- it is\na matter of implementation quality how garbage collection is\nimplemented, as long as no objects are collected that are still\nreachable.\n\n**CPython implementation detail:** CPython currently uses a reference-\ncounting scheme with (optional) delayed detection of cyclically linked\ngarbage, which collects most objects as soon as they become\nunreachable, but is not guaranteed to collect garbage containing\ncircular references. See the documentation of the ``gc`` module for\ninformation on controlling the collection of cyclic garbage. Other\nimplementations act differently and CPython may change. 
Do not depend\non immediate finalization of objects when they become unreachable (ex:\nalways close files).\n\nNote that the use of the implementation\'s tracing or debugging\nfacilities may keep objects alive that would normally be collectable.\nAlso note that catching an exception with a \'``try``...``except``\'\nstatement may keep objects alive.\n\nSome objects contain references to "external" resources such as open\nfiles or windows. It is understood that these resources are freed\nwhen the object is garbage-collected, but since garbage collection is\nnot guaranteed to happen, such objects also provide an explicit way to\nrelease the external resource, usually a ``close()`` method. Programs\nare strongly recommended to explicitly close such objects. The\n\'``try``...``finally``\' statement provides a convenient way to do\nthis.\n\nSome objects contain references to other objects; these are called\n*containers*. Examples of containers are tuples, lists and\ndictionaries. The references are part of a container\'s value. In\nmost cases, when we talk about the value of a container, we imply the\nvalues, not the identities of the contained objects; however, when we\ntalk about the mutability of a container, only the identities of the\nimmediately contained objects are implied. So, if an immutable\ncontainer (like a tuple) contains a reference to a mutable object, its\nvalue changes if that mutable object is changed.\n\nTypes affect almost all aspects of object behavior. Even the\nimportance of object identity is affected in some sense: for immutable\ntypes, operations that compute new values may actually return a\nreference to any existing object with the same type and value, while\nfor mutable objects this is not allowed. E.g., after ``a = 1; b =\n1``, ``a`` and ``b`` may or may not refer to the same object with the\nvalue one, depending on the implementation, but after ``c = []; d =\n[]``, ``c`` and ``d`` are guaranteed to refer to two different,\nunique, newly created empty lists. (Note that ``c = d = []`` assigns\nthe same object to both ``c`` and ``d``.)\n', - 'operator-summary': '\nOperator precedence\n*******************\n\nThe following table summarizes the operator precedences in Python,\nfrom lowest precedence (least binding) to highest precedence (most\nbinding). Operators in the same box have the same precedence. Unless\nthe syntax is explicitly given, operators are binary. 
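[Editorial illustration of the container-mutability point made earlier in this hunk: an immutable container's value can change when a mutable object it refers to changes, even though the container itself cannot be rebound.]

    t = ([1, 2], 'label')     # a tuple (immutable) holding a list (mutable)
    t[0].append(3)            # allowed: the list is mutated, the tuple is untouched
    print t                   # ([1, 2, 3], 'label')
    try:
        t[0] = []             # the tuple's own items cannot be reassigned
    except TypeError:
        print 'the tuple itself is immutable'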
Operators in\nthe same box group left to right (except for comparisons, including\ntests, which all have the same precedence and chain from left to right\n--- see section *Comparisons* --- and exponentiation, which groups\nfrom right to left).\n\n+-------------------------------------------------+---------------------------------------+\n| Operator | Description |\n+=================================================+=======================================+\n| ``lambda`` | Lambda expression |\n+-------------------------------------------------+---------------------------------------+\n| ``if`` -- ``else`` | Conditional expression |\n+-------------------------------------------------+---------------------------------------+\n| ``or`` | Boolean OR |\n+-------------------------------------------------+---------------------------------------+\n| ``and`` | Boolean AND |\n+-------------------------------------------------+---------------------------------------+\n| ``not`` ``x`` | Boolean NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``in``, ``not in``, ``is``, ``is not``, ``<``, | Comparisons, including membership |\n| ``<=``, ``>``, ``>=``, ``<>``, ``!=``, ``==`` | tests and identity tests, |\n+-------------------------------------------------+---------------------------------------+\n| ``|`` | Bitwise OR |\n+-------------------------------------------------+---------------------------------------+\n| ``^`` | Bitwise XOR |\n+-------------------------------------------------+---------------------------------------+\n| ``&`` | Bitwise AND |\n+-------------------------------------------------+---------------------------------------+\n| ``<<``, ``>>`` | Shifts |\n+-------------------------------------------------+---------------------------------------+\n| ``+``, ``-`` | Addition and subtraction |\n+-------------------------------------------------+---------------------------------------+\n| ``*``, ``/``, ``//``, ``%`` | Multiplication, division, remainder |\n| | [8] |\n+-------------------------------------------------+---------------------------------------+\n| ``+x``, ``-x``, ``~x`` | Positive, negative, bitwise NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``**`` | Exponentiation [9] |\n+-------------------------------------------------+---------------------------------------+\n| ``x[index]``, ``x[index:index]``, | Subscription, slicing, call, |\n| ``x(arguments...)``, ``x.attribute`` | attribute reference |\n+-------------------------------------------------+---------------------------------------+\n| ``(expressions...)``, ``[expressions...]``, | Binding or tuple display, list |\n| ``{key: value...}``, ```expressions...``` | display, dictionary display, string |\n| | conversion |\n+-------------------------------------------------+---------------------------------------+\n\n-[ Footnotes ]-\n\n[1] In Python 2.3 and later releases, a list comprehension "leaks" the\n control variables of each ``for`` it contains into the containing\n scope. However, this behavior is deprecated, and relying on it\n will not work in Python 3.\n\n[2] While ``abs(x%y) < abs(y)`` is true mathematically, for floats it\n may not be true numerically due to roundoff. 
For example, and\n assuming a platform on which a Python float is an IEEE 754 double-\n precision number, in order that ``-1e-100 % 1e100`` have the same\n sign as ``1e100``, the computed result is ``-1e-100 + 1e100``,\n which is numerically exactly equal to ``1e100``. The function\n ``math.fmod()`` returns a result whose sign matches the sign of\n the first argument instead, and so returns ``-1e-100`` in this\n case. Which approach is more appropriate depends on the\n application.\n\n[3] If x is very close to an exact integer multiple of y, it\'s\n possible for ``floor(x/y)`` to be one larger than ``(x-x%y)/y``\n due to rounding. In such cases, Python returns the latter result,\n in order to preserve that ``divmod(x,y)[0] * y + x % y`` be very\n close to ``x``.\n\n[4] While comparisons between unicode strings make sense at the byte\n level, they may be counter-intuitive to users. For example, the\n strings ``u"\\u00C7"`` and ``u"\\u0043\\u0327"`` compare differently,\n even though they both represent the same unicode character (LATIN\n CAPITAL LETTER C WITH CEDILLA). To compare strings in a human\n recognizable way, compare using ``unicodedata.normalize()``.\n\n[5] The implementation computes this efficiently, without constructing\n lists or sorting.\n\n[6] Earlier versions of Python used lexicographic comparison of the\n sorted (key, value) lists, but this was very expensive for the\n common case of comparing for equality. An even earlier version of\n Python compared dictionaries by identity only, but this caused\n surprises because people expected to be able to test a dictionary\n for emptiness by comparing it to ``{}``.\n\n[7] Due to automatic garbage-collection, free lists, and the dynamic\n nature of descriptors, you may notice seemingly unusual behaviour\n in certain uses of the ``is`` operator, like those involving\n comparisons between instance methods, or constants. Check their\n documentation for more info.\n\n[8] The ``%`` operator is also used for string formatting; the same\n precedence applies.\n\n[9] The power operator ``**`` binds less tightly than an arithmetic or\n bitwise unary operator on its right, that is, ``2**-1`` is\n ``0.5``.\n', + 'operator-summary': '\nOperator precedence\n*******************\n\nThe following table summarizes the operator precedences in Python,\nfrom lowest precedence (least binding) to highest precedence (most\nbinding). Operators in the same box have the same precedence. Unless\nthe syntax is explicitly given, operators are binary. 
Operators in\nthe same box group left to right (except for comparisons, including\ntests, which all have the same precedence and chain from left to right\n--- see section *Comparisons* --- and exponentiation, which groups\nfrom right to left).\n\n+-------------------------------------------------+---------------------------------------+\n| Operator | Description |\n+=================================================+=======================================+\n| ``lambda`` | Lambda expression |\n+-------------------------------------------------+---------------------------------------+\n| ``if`` -- ``else`` | Conditional expression |\n+-------------------------------------------------+---------------------------------------+\n| ``or`` | Boolean OR |\n+-------------------------------------------------+---------------------------------------+\n| ``and`` | Boolean AND |\n+-------------------------------------------------+---------------------------------------+\n| ``not`` ``x`` | Boolean NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``in``, ``not in``, ``is``, ``is not``, ``<``, | Comparisons, including membership |\n| ``<=``, ``>``, ``>=``, ``<>``, ``!=``, ``==`` | tests and identity tests |\n+-------------------------------------------------+---------------------------------------+\n| ``|`` | Bitwise OR |\n+-------------------------------------------------+---------------------------------------+\n| ``^`` | Bitwise XOR |\n+-------------------------------------------------+---------------------------------------+\n| ``&`` | Bitwise AND |\n+-------------------------------------------------+---------------------------------------+\n| ``<<``, ``>>`` | Shifts |\n+-------------------------------------------------+---------------------------------------+\n| ``+``, ``-`` | Addition and subtraction |\n+-------------------------------------------------+---------------------------------------+\n| ``*``, ``/``, ``//``, ``%`` | Multiplication, division, remainder |\n| | [8] |\n+-------------------------------------------------+---------------------------------------+\n| ``+x``, ``-x``, ``~x`` | Positive, negative, bitwise NOT |\n+-------------------------------------------------+---------------------------------------+\n| ``**`` | Exponentiation [9] |\n+-------------------------------------------------+---------------------------------------+\n| ``x[index]``, ``x[index:index]``, | Subscription, slicing, call, |\n| ``x(arguments...)``, ``x.attribute`` | attribute reference |\n+-------------------------------------------------+---------------------------------------+\n| ``(expressions...)``, ``[expressions...]``, | Binding or tuple display, list |\n| ``{key: value...}``, ```expressions...``` | display, dictionary display, string |\n| | conversion |\n+-------------------------------------------------+---------------------------------------+\n\n-[ Footnotes ]-\n\n[1] In Python 2.3 and later releases, a list comprehension "leaks" the\n control variables of each ``for`` it contains into the containing\n scope. However, this behavior is deprecated, and relying on it\n will not work in Python 3.\n\n[2] While ``abs(x%y) < abs(y)`` is true mathematically, for floats it\n may not be true numerically due to roundoff. 
For example, and\n assuming a platform on which a Python float is an IEEE 754 double-\n precision number, in order that ``-1e-100 % 1e100`` have the same\n sign as ``1e100``, the computed result is ``-1e-100 + 1e100``,\n which is numerically exactly equal to ``1e100``. The function\n ``math.fmod()`` returns a result whose sign matches the sign of\n the first argument instead, and so returns ``-1e-100`` in this\n case. Which approach is more appropriate depends on the\n application.\n\n[3] If x is very close to an exact integer multiple of y, it\'s\n possible for ``floor(x/y)`` to be one larger than ``(x-x%y)/y``\n due to rounding. In such cases, Python returns the latter result,\n in order to preserve that ``divmod(x,y)[0] * y + x % y`` be very\n close to ``x``.\n\n[4] While comparisons between unicode strings make sense at the byte\n level, they may be counter-intuitive to users. For example, the\n strings ``u"\\u00C7"`` and ``u"\\u0043\\u0327"`` compare differently,\n even though they both represent the same unicode character (LATIN\n CAPITAL LETTER C WITH CEDILLA). To compare strings in a human\n recognizable way, compare using ``unicodedata.normalize()``.\n\n[5] The implementation computes this efficiently, without constructing\n lists or sorting.\n\n[6] Earlier versions of Python used lexicographic comparison of the\n sorted (key, value) lists, but this was very expensive for the\n common case of comparing for equality. An even earlier version of\n Python compared dictionaries by identity only, but this caused\n surprises because people expected to be able to test a dictionary\n for emptiness by comparing it to ``{}``.\n\n[7] Due to automatic garbage-collection, free lists, and the dynamic\n nature of descriptors, you may notice seemingly unusual behaviour\n in certain uses of the ``is`` operator, like those involving\n comparisons between instance methods, or constants. Check their\n documentation for more info.\n\n[8] The ``%`` operator is also used for string formatting; the same\n precedence applies.\n\n[9] The power operator ``**`` binds less tightly than an arithmetic or\n bitwise unary operator on its right, that is, ``2**-1`` is\n ``0.5``.\n', 'pass': '\nThe ``pass`` statement\n**********************\n\n pass_stmt ::= "pass"\n\n``pass`` is a null operation --- when it is executed, nothing happens.\nIt is useful as a placeholder when a statement is required\nsyntactically, but no code needs to be executed, for example:\n\n def f(arg): pass # a function that does nothing (yet)\n\n class C: pass # a class with no methods (yet)\n', 'power': '\nThe power operator\n******************\n\nThe power operator binds more tightly than unary operators on its\nleft; it binds less tightly than unary operators on its right. The\nsyntax is:\n\n power ::= primary ["**" u_expr]\n\nThus, in an unparenthesized sequence of power and unary operators, the\noperators are evaluated from right to left (this does not constrain\nthe evaluation order for the operands): ``-1**2`` results in ``-1``.\n\nThe power operator has the same semantics as the built-in ``pow()``\nfunction, when called with two arguments: it yields its left argument\nraised to the power of its right argument. The numeric arguments are\nfirst converted to a common type. The result type is that of the\narguments after coercion.\n\nWith mixed operand types, the coercion rules for binary arithmetic\noperators apply. 
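[Editorial illustration: the grouping and precedence rules summarized in the table and footnotes above, checked interactively in Python 2.]

    print 1 < 2 < 3        # True: comparisons chain, i.e. (1 < 2) and (2 < 3)
    print 10 - 4 - 3       # 3: same-box operators group left to right
    print 2 ** 3 ** 2      # 512: exponentiation groups right to left
    print 2 ** -1          # 0.5: ** binds less tightly than the unary minus on its right
    print -1 ** 2          # -1: parsed as -(1 ** 2)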
For int and long int operands, the result has the\nsame type as the operands (after coercion) unless the second argument\nis negative; in that case, all arguments are converted to float and a\nfloat result is delivered. For example, ``10**2`` returns ``100``, but\n``10**-2`` returns ``0.01``. (This last feature was added in Python\n2.2. In Python 2.1 and before, if both arguments were of integer types\nand the second argument was negative, an exception was raised).\n\nRaising ``0.0`` to a negative power results in a\n``ZeroDivisionError``. Raising a negative number to a fractional power\nresults in a ``ValueError``.\n', 'raise': '\nThe ``raise`` statement\n***********************\n\n raise_stmt ::= "raise" [expression ["," expression ["," expression]]]\n\nIf no expressions are present, ``raise`` re-raises the last exception\nthat was active in the current scope. If no exception is active in\nthe current scope, a ``TypeError`` exception is raised indicating that\nthis is an error (if running under IDLE, a ``Queue.Empty`` exception\nis raised instead).\n\nOtherwise, ``raise`` evaluates the expressions to get three objects,\nusing ``None`` as the value of omitted expressions. The first two\nobjects are used to determine the *type* and *value* of the exception.\n\nIf the first object is an instance, the type of the exception is the\nclass of the instance, the instance itself is the value, and the\nsecond object must be ``None``.\n\nIf the first object is a class, it becomes the type of the exception.\nThe second object is used to determine the exception value: If it is\nan instance of the class, the instance becomes the exception value. If\nthe second object is a tuple, it is used as the argument list for the\nclass constructor; if it is ``None``, an empty argument list is used,\nand any other object is treated as a single argument to the\nconstructor. The instance so created by calling the constructor is\nused as the exception value.\n\nIf a third object is present and not ``None``, it must be a traceback\nobject (see section *The standard type hierarchy*), and it is\nsubstituted instead of the current location as the place where the\nexception occurred. If the third object is present and not a\ntraceback object or ``None``, a ``TypeError`` exception is raised.\nThe three-expression form of ``raise`` is useful to re-raise an\nexception transparently in an except clause, but ``raise`` with no\nexpressions should be preferred if the exception to be re-raised was\nthe most recently active exception in the current scope.\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information about handling exceptions is in section\n*The try statement*.\n', @@ -59,19 +59,19 @@ 'slicings': '\nSlicings\n********\n\nA slicing selects a range of items in a sequence object (e.g., a\nstring, tuple or list). Slicings may be used as expressions or as\ntargets in assignment or ``del`` statements. 
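[Editorial illustration: the three-expression ``raise`` form described above, re-raising an exception with its original traceback via ``sys.exc_info()`` (Python 2 syntax; the function name is made up).]

    import sys

    def reraise_after_cleanup():
        try:
            1 / 0
        except ZeroDivisionError:
            exc_type, exc_value, tb = sys.exc_info()
            # ... cleanup could happen here ...
            raise exc_type, exc_value, tb   # re-raise, keeping the original traceback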
The syntax for a\nslicing:\n\n slicing ::= simple_slicing | extended_slicing\n simple_slicing ::= primary "[" short_slice "]"\n extended_slicing ::= primary "[" slice_list "]"\n slice_list ::= slice_item ("," slice_item)* [","]\n slice_item ::= expression | proper_slice | ellipsis\n proper_slice ::= short_slice | long_slice\n short_slice ::= [lower_bound] ":" [upper_bound]\n long_slice ::= short_slice ":" [stride]\n lower_bound ::= expression\n upper_bound ::= expression\n stride ::= expression\n ellipsis ::= "..."\n\nThere is ambiguity in the formal syntax here: anything that looks like\nan expression list also looks like a slice list, so any subscription\ncan be interpreted as a slicing. Rather than further complicating the\nsyntax, this is disambiguated by defining that in this case the\ninterpretation as a subscription takes priority over the\ninterpretation as a slicing (this is the case if the slice list\ncontains no proper slice nor ellipses). Similarly, when the slice\nlist has exactly one short slice and no trailing comma, the\ninterpretation as a simple slicing takes priority over that as an\nextended slicing.\n\nThe semantics for a simple slicing are as follows. The primary must\nevaluate to a sequence object. The lower and upper bound expressions,\nif present, must evaluate to plain integers; defaults are zero and the\n``sys.maxint``, respectively. If either bound is negative, the\nsequence\'s length is added to it. The slicing now selects all items\nwith index *k* such that ``i <= k < j`` where *i* and *j* are the\nspecified lower and upper bounds. This may be an empty sequence. It\nis not an error if *i* or *j* lie outside the range of valid indexes\n(such items don\'t exist so they aren\'t selected).\n\nThe semantics for an extended slicing are as follows. The primary\nmust evaluate to a mapping object, and it is indexed with a key that\nis constructed from the slice list, as follows. If the slice list\ncontains at least one comma, the key is a tuple containing the\nconversion of the slice items; otherwise, the conversion of the lone\nslice item is the key. The conversion of a slice item that is an\nexpression is that expression. The conversion of an ellipsis slice\nitem is the built-in ``Ellipsis`` object. The conversion of a proper\nslice is a slice object (see section *The standard type hierarchy*)\nwhose ``start``, ``stop`` and ``step`` attributes are the values of\nthe expressions given as lower bound, upper bound and stride,\nrespectively, substituting ``None`` for missing expressions.\n', 'specialattrs': '\nSpecial Attributes\n******************\n\nThe implementation adds a few special read-only attributes to several\nobject types, where they are relevant. Some of these are not reported\nby the ``dir()`` built-in function.\n\nobject.__dict__\n\n A dictionary or other mapping object used to store an object\'s\n (writable) attributes.\n\nobject.__methods__\n\n Deprecated since version 2.2: Use the built-in function ``dir()``\n to get a list of an object\'s attributes. This attribute is no\n longer available.\n\nobject.__members__\n\n Deprecated since version 2.2: Use the built-in function ``dir()``\n to get a list of an object\'s attributes. 
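[Editorial illustration: the slice-to-key conversion described earlier in this hunk -- a lone short slice becomes a ``slice`` object, a slice list with a comma becomes a tuple, and ``...`` becomes ``Ellipsis``. ``ShowKey`` is a hypothetical class that simply returns its key.]

    class ShowKey(object):
        def __getitem__(self, key):
            return key

    s = ShowKey()
    print s[1:5]          # slice(1, 5, None)
    print s[1:5:2]        # slice(1, 5, 2)
    print s[1:5, ...]     # (slice(1, 5, None), Ellipsis)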
This attribute is no\n longer available.\n\ninstance.__class__\n\n The class to which a class instance belongs.\n\nclass.__bases__\n\n The tuple of base classes of a class object.\n\nclass.__name__\n\n The name of the class or type.\n\nThe following attributes are only supported by *new-style class*es.\n\nclass.__mro__\n\n This attribute is a tuple of classes that are considered when\n looking for base classes during method resolution.\n\nclass.mro()\n\n This method can be overridden by a metaclass to customize the\n method resolution order for its instances. It is called at class\n instantiation, and its result is stored in ``__mro__``.\n\nclass.__subclasses__()\n\n Each new-style class keeps a list of weak references to its\n immediate subclasses. This method returns a list of all those\n references still alive. Example:\n\n >>> int.__subclasses__()\n []\n\n-[ Footnotes ]-\n\n[1] Additional information on these special methods may be found in\n the Python Reference Manual (*Basic customization*).\n\n[2] As a consequence, the list ``[1, 2]`` is considered equal to\n ``[1.0, 2.0]``, and similarly for tuples.\n\n[3] They must have since the parser can\'t tell the type of the\n operands.\n\n[4] Cased characters are those with general category property being\n one of "Lu" (Letter, uppercase), "Ll" (Letter, lowercase), or "Lt"\n (Letter, titlecase).\n\n[5] To format only a tuple you should therefore provide a singleton\n tuple whose only element is the tuple to be formatted.\n\n[6] The advantage of leaving the newline on is that returning an empty\n string is then an unambiguous EOF indication. It is also possible\n (in cases where it might matter, for example, if you want to make\n an exact copy of a file while scanning its lines) to tell whether\n the last line of a file ended in a newline or not (yes this\n happens!).\n', 'specialnames': '\nSpecial method names\n********************\n\nA class can implement certain operations that are invoked by special\nsyntax (such as arithmetic operations or subscripting and slicing) by\ndefining methods with special names. This is Python\'s approach to\n*operator overloading*, allowing classes to define their own behavior\nwith respect to language operators. For instance, if a class defines\na method named ``__getitem__()``, and ``x`` is an instance of this\nclass, then ``x[i]`` is roughly equivalent to ``x.__getitem__(i)`` for\nold-style classes and ``type(x).__getitem__(x, i)`` for new-style\nclasses. Except where mentioned, attempts to execute an operation\nraise an exception when no appropriate method is defined (typically\n``AttributeError`` or ``TypeError``).\n\nWhen implementing a class that emulates any built-in type, it is\nimportant that the emulation only be implemented to the degree that it\nmakes sense for the object being modelled. For example, some\nsequences may work well with retrieval of individual elements, but\nextracting a slice may not make sense. (One example of this is the\n``NodeList`` interface in the W3C\'s Document Object Model.)\n\n\nBasic customization\n===================\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. ``__new__()`` is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). 
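[Editorial illustration: the new-style-class special attributes listed earlier in this hunk (``__bases__``, ``__mro__``, ``__subclasses__()``), in a quick Python 2 sketch.]

    class A(object):
        pass

    class B(A):
        pass

    print B.__bases__                                # (<class '__main__.A'>,)
    print [c.__name__ for c in B.__mro__]            # ['B', 'A', 'object']
    print [c.__name__ for c in A.__subclasses__()]   # ['B']
    print B().__class__ is B                         # True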
The return\n value of ``__new__()`` should be the new object instance (usually\n an instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s ``__new__()`` method using\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If ``__new__()`` returns an instance of *cls*, then the new\n instance\'s ``__init__()`` method will be invoked like\n ``__init__(self[, ...])``, where *self* is the new instance and the\n remaining arguments are the same as were passed to ``__new__()``.\n\n If ``__new__()`` does not return an instance of *cls*, then the new\n instance\'s ``__init__()`` method will not be invoked.\n\n ``__new__()`` is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n ``__init__()`` method, the derived class\'s ``__init__()`` method,\n if any, must explicitly call it to ensure proper initialization of\n the base class part of the instance; for example:\n ``BaseClass.__init__(self, [args...])``. As a special constraint\n on constructors, no value may be returned; doing so will cause a\n ``TypeError`` to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a ``__del__()`` method,\n the derived class\'s ``__del__()`` method, if any, must explicitly\n call it to ensure proper deletion of the base class part of the\n instance. Note that it is possible (though not recommended!) for\n the ``__del__()`` method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n ``__del__()`` methods are called for objects that still exist when\n the interpreter exits.\n\n Note: ``del x`` doesn\'t directly call ``x.__del__()`` --- the former\n decrements the reference count for ``x`` by one, and the latter\n is only called when ``x``\'s reference count reaches zero. Some\n common situations that may prevent the reference count of an\n object from going to zero include: circular references between\n objects (e.g., a doubly-linked list or a tree data structure with\n parent and child pointers); a reference to the object on the\n stack frame of a function that caught an exception (the traceback\n stored in ``sys.exc_traceback`` keeps the stack frame alive); or\n a reference to the object on the stack frame that raised an\n unhandled exception in interactive mode (the traceback stored in\n ``sys.last_traceback`` keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing ``None`` in\n ``sys.exc_traceback`` or ``sys.last_traceback``. 
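[Editorial illustration: the note above that ``__new__()`` exists mainly so subclasses of immutable types can customize instance creation; ``UpperStr`` is a made-up name.]

    class UpperStr(str):
        def __new__(cls, value):
            # str is immutable, so the value must be fixed up at creation time,
            # not afterwards in __init__()
            return str.__new__(cls, value.upper())

    print UpperStr('hello')                    # HELLO
    print isinstance(UpperStr('hello'), str)   # True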
Circular\n references which are garbage are detected when the option cycle\n detector is enabled (it\'s on by default), but can only be cleaned\n up if there are no Python-level ``__del__()`` methods involved.\n Refer to the documentation for the ``gc`` module for more\n information about how ``__del__()`` methods are handled by the\n cycle detector, particularly the description of the ``garbage``\n value.\n\n Warning: Due to the precarious circumstances under which ``__del__()``\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to ``sys.stderr`` instead.\n Also, when ``__del__()`` is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the ``__del__()`` method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, ``__del__()``\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the ``__del__()`` method is called.\n\n See also the *-R* command-line option.\n\nobject.__repr__(self)\n\n Called by the ``repr()`` built-in function and by string\n conversions (reverse quotes) to compute the "official" string\n representation of an object. If at all possible, this should look\n like a valid Python expression that could be used to recreate an\n object with the same value (given an appropriate environment). If\n this is not possible, a string of the form ``<...some useful\n description...>`` should be returned. The return value must be a\n string object. If a class defines ``__repr__()`` but not\n ``__str__()``, then ``__repr__()`` is also used when an "informal"\n string representation of instances of that class is required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by the ``str()`` built-in function and by the ``print``\n statement to compute the "informal" string representation of an\n object. This differs from ``__repr__()`` in that it does not have\n to be a valid Python expression: a more convenient or concise\n representation may be used instead. The return value must be a\n string object.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n New in version 2.1.\n\n These are the so-called "rich comparison" methods, and are called\n for comparison operators in preference to ``__cmp__()`` below. The\n correspondence between operator symbols and method names is as\n follows: ``xy`` call ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and\n ``x>=y`` calls ``x.__ge__(y)``.\n\n A rich comparison method may return the singleton\n ``NotImplemented`` if it does not implement the operation for a\n given pair of arguments. By convention, ``False`` and ``True`` are\n returned for a successful comparison. 
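[Editorial illustration: the ``__repr__()``/``__str__()`` distinction described earlier in this hunk, sketched with a hypothetical ``Point`` class.]

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y
        def __repr__(self):
            # aims to look like an expression that recreates the object
            return 'Point(%r, %r)' % (self.x, self.y)
        def __str__(self):
            # informal, more readable form
            return '(%s, %s)' % (self.x, self.y)

    p = Point(1, 2)
    print repr(p)    # Point(1, 2)
    print p          # (1, 2) -- print uses __str__()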
However, these methods can\n return any value, so if the comparison operator is used in a\n Boolean context (e.g., in the condition of an ``if`` statement),\n Python will call ``bool()`` on the value to determine if the result\n is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\n Accordingly, when defining ``__eq__()``, one should also define\n ``__ne__()`` so that the operators will behave as expected. See\n the paragraph on ``__hash__()`` for some important notes on\n creating *hashable* objects which support custom comparison\n operations and are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\n other\'s reflection, ``__le__()`` and ``__ge__()`` are each other\'s\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\n reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see ``functools.total_ordering()``.\n\nobject.__cmp__(self, other)\n\n Called by comparison operations if rich comparison (see above) is\n not defined. Should return a negative integer if ``self < other``,\n zero if ``self == other``, a positive integer if ``self > other``.\n If no ``__cmp__()``, ``__eq__()`` or ``__ne__()`` operation is\n defined, class instances are compared by object identity\n ("address"). See also the description of ``__hash__()`` for some\n important notes on creating *hashable* objects which support custom\n comparison operations and are usable as dictionary keys. (Note: the\n restriction that exceptions are not propagated by ``__cmp__()`` has\n been removed since Python 1.5.)\n\nobject.__rcmp__(self, other)\n\n Changed in version 2.1: No longer supported.\n\nobject.__hash__(self)\n\n Called by built-in function ``hash()`` and for operations on\n members of hashed collections including ``set``, ``frozenset``, and\n ``dict``. ``__hash__()`` should return an integer. The only\n required property is that objects which compare equal have the same\n hash value; it is advised to somehow mix together (e.g. using\n exclusive or) the hash values for the components of the object that\n also play a part in comparison of objects.\n\n If a class does not define a ``__cmp__()`` or ``__eq__()`` method\n it should not define a ``__hash__()`` operation either; if it\n defines ``__cmp__()`` or ``__eq__()`` but not ``__hash__()``, its\n instances will not be usable in hashed collections. If a class\n defines mutable objects and implements a ``__cmp__()`` or\n ``__eq__()`` method, it should not implement ``__hash__()``, since\n hashable collection implementations require that a object\'s hash\n value is immutable (if the object\'s hash value changes, it will be\n in the wrong hash bucket).\n\n User-defined classes have ``__cmp__()`` and ``__hash__()`` methods\n by default; with them, all objects compare unequal (except with\n themselves) and ``x.__hash__()`` returns ``id(x)``.\n\n Classes which inherit a ``__hash__()`` method from a parent class\n but change the meaning of ``__cmp__()`` or ``__eq__()`` such that\n the hash value returned is no longer appropriate (e.g. 
by switching\n to a value-based concept of equality instead of the default\n identity based equality) can explicitly flag themselves as being\n unhashable by setting ``__hash__ = None`` in the class definition.\n Doing so means that not only will instances of the class raise an\n appropriate ``TypeError`` when a program attempts to retrieve their\n hash value, but they will also be correctly identified as\n unhashable when checking ``isinstance(obj, collections.Hashable)``\n (unlike classes which define their own ``__hash__()`` to explicitly\n raise ``TypeError``).\n\n Changed in version 2.5: ``__hash__()`` may now also return a long\n integer object; the 32-bit integer is then derived from the hash of\n that object.\n\n Changed in version 2.6: ``__hash__`` may now be set to ``None`` to\n explicitly flag instances of a class as unhashable.\n\nobject.__nonzero__(self)\n\n Called to implement truth value testing and the built-in operation\n ``bool()``; should return ``False`` or ``True``, or their integer\n equivalents ``0`` or ``1``. When this method is not defined,\n ``__len__()`` is called, if it is defined, and the object is\n considered true if its result is nonzero. If a class defines\n neither ``__len__()`` nor ``__nonzero__()``, all its instances are\n considered true.\n\nobject.__unicode__(self)\n\n Called to implement ``unicode()`` built-in; should return a Unicode\n object. When this method is not defined, string conversion is\n attempted, and the result of string conversion is converted to\n Unicode using the system default encoding.\n\n\nCustomizing attribute access\n============================\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of ``x.name``)\nfor class instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for ``self``). ``name`` is the attribute name.\n This method should return the (computed) attribute value or raise\n an ``AttributeError`` exception.\n\n Note that if the attribute is found through the normal mechanism,\n ``__getattr__()`` is not called. (This is an intentional asymmetry\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\n for efficiency reasons and because otherwise ``__getattr__()``\n would have no way to access other attributes of the instance. Note\n that at least for instance variables, you can fake total control by\n not inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n ``__getattribute__()`` method below for a way to actually get total\n control in new-style classes.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If ``__setattr__()`` wants to assign to an instance attribute, it\n should not simply execute ``self.name = value`` --- this would\n cause a recursive call to itself. Instead, it should insert the\n value in the dictionary of instance attributes, e.g.,\n ``self.__dict__[name] = value``. 
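[Editorial illustration: the hashing rules described earlier in this hunk -- objects that compare equal must hash equal, and ``__eq__()`` should be paired with ``__ne__()`` and a consistent ``__hash__()``. ``Color`` is a hypothetical class.]

    class Color(object):
        def __init__(self, name):
            self.name = name
        def __eq__(self, other):
            return isinstance(other, Color) and other.name == self.name
        def __ne__(self, other):
            return not self == other
        def __hash__(self):
            return hash(self.name)      # equal Colors hash alike

    print Color('red') == Color('red')                    # True
    print len(set([Color('red'), Color('red')]))          # 1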
For new-style classes, rather\n than accessing the instance dictionary, it should call the base\n class method with the same name, for example,\n ``object.__setattr__(self, name, value)``.\n\nobject.__delattr__(self, name)\n\n Like ``__setattr__()`` but for attribute deletion instead of\n assignment. This should only be implemented if ``del obj.name`` is\n meaningful for the object.\n\n\nMore attribute access for new-style classes\n-------------------------------------------\n\nThe following methods only apply to new-style classes.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines\n ``__getattr__()``, the latter will not be called unless\n ``__getattribute__()`` either calls it explicitly or raises an\n ``AttributeError``. This method should return the (computed)\n attribute value or raise an ``AttributeError`` exception. In order\n to avoid infinite recursion in this method, its implementation\n should always call the base class method with the same name to\n access any attributes it needs, for example,\n ``object.__getattribute__(self, name)``.\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup for new-style\n classes*.\n\n\nImplementing Descriptors\n------------------------\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' ``__dict__``.\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or ``None`` when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an\n ``AttributeError`` exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\n\nInvoking Descriptors\n--------------------\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\n``__delete__()``. If any of those methods are defined for an object,\nit is said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, ``a.x`` has a\nlookup chain starting with ``a.__dict__[\'x\']``, then\n``type(a).__dict__[\'x\']``, and continuing through the base classes of\n``type(a)`` excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called. 
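[Editorial illustration: a minimal data descriptor along the lines described above -- it defines both ``__get__()`` and ``__set__()`` and lives in the owner class's dictionary. All names here are hypothetical.]

    class Typed(object):
        """Data descriptor that type-checks assignments."""
        def __init__(self, name, kind):
            self.name, self.kind = name, kind
        def __get__(self, instance, owner):
            if instance is None:
                return self                # accessed on the class itself
            return instance.__dict__[self.name]
        def __set__(self, instance, value):
            if not isinstance(value, self.kind):
                raise TypeError('%s must be %s' % (self.name, self.kind.__name__))
            instance.__dict__[self.name] = value

    class Pixel(object):
        x = Typed('x', int)
        y = Typed('y', int)

    p = Pixel()
    p.x = 3
    print p.x           # 3, via Typed.__get__
    # p.x = 'three' would raise TypeError via Typed.__set__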
Note that descriptors are only invoked for new\nstyle objects or classes (ones that subclass ``object()`` or\n``type()``).\n\nThe starting point for descriptor invocation is a binding, ``a.x``.\nHow the arguments are assembled depends on ``a``:\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: ``x.__get__(a)``.\n\nInstance Binding\n If binding to a new-style object instance, ``a.x`` is transformed\n into the call: ``type(a).__dict__[\'x\'].__get__(a, type(a))``.\n\nClass Binding\n If binding to a new-style class, ``A.x`` is transformed into the\n call: ``A.__dict__[\'x\'].__get__(None, A)``.\n\nSuper Binding\n If ``a`` is an instance of ``super``, then the binding ``super(B,\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\n ``A`` immediately preceding ``B`` and then invokes the descriptor\n with the call: ``A.__dict__[\'m\'].__get__(obj, obj.__class__)``.\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\nIf it does not define ``__get__()``, then accessing the attribute will\nreturn the descriptor object itself unless there is a value in the\nobject\'s instance dictionary. If the descriptor defines ``__set__()``\nand/or ``__delete__()``, it is a data descriptor; if it defines\nneither, it is a non-data descriptor. Normally, data descriptors\ndefine both ``__get__()`` and ``__set__()``, while non-data\ndescriptors have just the ``__get__()`` method. Data descriptors with\n``__set__()`` and ``__get__()`` defined always override a redefinition\nin an instance dictionary. In contrast, non-data descriptors can be\noverridden by instances.\n\nPython methods (including ``staticmethod()`` and ``classmethod()``)\nare implemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe ``property()`` function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n---------\n\nBy default, instances of both old and new-style classes have a\ndictionary for attribute storage. This wastes space for objects\nhaving very few instance variables. The space consumption can become\nacute when creating large numbers of instances.\n\nThe default can be overridden by defining *__slots__* in a new-style\nclass definition. The *__slots__* declaration takes a sequence of\ninstance variables and reserves just enough space in each instance to\nhold a value for each variable. Space is saved because *__dict__* is\nnot created for each instance.\n\n__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n new-style class, *__slots__* reserves space for the declared\n variables and prevents the automatic creation of *__dict__* and\n *__weakref__* for each instance.\n\n New in version 2.2.\n\nNotes on using *__slots__*\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. 
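[Editorial illustration: the ``__slots__`` behaviour described above -- no per-instance ``__dict__`` is created, and assignment to names not listed in ``__slots__`` fails.]

    class Vec(object):
        __slots__ = ('x', 'y')         # no __dict__ for instances

    v = Vec()
    v.x = 1.0
    try:
        v.z = 2.0                      # 'z' is not a declared slot
    except AttributeError:
        print "no slot named 'z'"
    print hasattr(v, '__dict__')       # False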
Attempts to\n assign to an unlisted variable name raises ``AttributeError``. If\n dynamic assignment of new variables is desired, then add\n ``\'__dict__\'`` to the sequence of strings in the *__slots__*\n declaration.\n\n Changed in version 2.3: Previously, adding ``\'__dict__\'`` to the\n *__slots__* declaration would not enable the assignment of new\n attributes not specifically listed in the sequence of instance\n variable names.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. If weak\n reference support is needed, then add ``\'__weakref__\'`` to the\n sequence of strings in the *__slots__* declaration.\n\n Changed in version 2.3: Previously, adding ``\'__weakref__\'`` to the\n *__slots__* declaration would not enable support for weak\n references.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as ``long``, ``str`` and\n ``tuple``.\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n Changed in version 2.6: Previously, *__class__* assignment raised an\n error if either new or old class had *__slots__*.\n\n\nCustomizing class creation\n==========================\n\nBy default, new-style classes are constructed using ``type()``. A\nclass definition is read into a separate namespace and the value of\nclass name is bound to the result of ``type(name, bases, dict)``.\n\nWhen the class definition is read, if *__metaclass__* is defined then\nthe callable assigned to it will be called instead of ``type()``. This\nallows classes or functions to be written which monitor or alter the\nclass creation process:\n\n* Modifying the class dictionary prior to the class being created.\n\n* Returning an instance of another class -- essentially performing the\n role of a factory function.\n\nThese steps will have to be performed in the metaclass\'s ``__new__()``\nmethod -- ``type.__new__()`` can then be called from this method to\ncreate a class with different properties. 
This example adds a new\nelement to the class dictionary before creating the class:\n\n class metacls(type):\n def __new__(mcs, name, bases, dict):\n dict[\'foo\'] = \'metacls was here\'\n return type.__new__(mcs, name, bases, dict)\n\nYou can of course also override other class methods (or add new\nmethods); for example defining a custom ``__call__()`` method in the\nmetaclass allows custom behavior when the class is called, e.g. not\nalways creating a new instance.\n\n__metaclass__\n\n This variable can be any callable accepting arguments for ``name``,\n ``bases``, and ``dict``. Upon class creation, the callable is used\n instead of the built-in ``type()``.\n\n New in version 2.2.\n\nThe appropriate metaclass is determined by the following precedence\nrules:\n\n* If ``dict[\'__metaclass__\']`` exists, it is used.\n\n* Otherwise, if there is at least one base class, its metaclass is\n used (this looks for a *__class__* attribute first and if not found,\n uses its type).\n\n* Otherwise, if a global variable named __metaclass__ exists, it is\n used.\n\n* Otherwise, the old-style, classic metaclass (types.ClassType) is\n used.\n\nThe potential uses for metaclasses are boundless. Some ideas that have\nbeen explored including logging, interface checking, automatic\ndelegation, automatic property creation, proxies, frameworks, and\nautomatic resource locking/synchronization.\n\n\nCustomizing instance and subclass checks\n========================================\n\nNew in version 2.6.\n\nThe following methods are used to override the default behavior of the\n``isinstance()`` and ``issubclass()`` built-in functions.\n\nIn particular, the metaclass ``abc.ABCMeta`` implements these methods\nin order to allow the addition of Abstract Base Classes (ABCs) as\n"virtual base classes" to any class or type (including built-in\ntypes), including other ABCs.\n\nclass.__instancecheck__(self, instance)\n\n Return true if *instance* should be considered a (direct or\n indirect) instance of *class*. If defined, called to implement\n ``isinstance(instance, class)``.\n\nclass.__subclasscheck__(self, subclass)\n\n Return true if *subclass* should be considered a (direct or\n indirect) subclass of *class*. If defined, called to implement\n ``issubclass(subclass, class)``.\n\nNote that these methods are looked up on the type (metaclass) of a\nclass. They cannot be defined as class methods in the actual class.\nThis is consistent with the lookup of special methods that are called\non instances, only in this case the instance is itself a class.\n\nSee also:\n\n **PEP 3119** - Introducing Abstract Base Classes\n Includes the specification for customizing ``isinstance()`` and\n ``issubclass()`` behavior through ``__instancecheck__()`` and\n ``__subclasscheck__()``, with motivation for this functionality\n in the context of adding Abstract Base Classes (see the ``abc``\n module) to the language.\n\n\nEmulating callable objects\n==========================\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\n ``x.__call__(arg1, arg2, ...)``.\n\n\nEmulating container types\n=========================\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. 
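[Editorial illustration: the ``__call__()`` hook described just above makes instances callable like functions; ``Adder`` is a made-up name.]

    class Adder(object):
        def __init__(self, n):
            self.n = n
        def __call__(self, x):
            return x + self.n          # add3(4) is shorthand for add3.__call__(4)

    add3 = Adder(3)
    print add3(4)                      # 7
    print callable(add3)               # True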
The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which ``0 <= k < N``\nwhere *N* is the length of the sequence, or slice objects, which\ndefine a range of items. (For backwards compatibility, the method\n``__getslice__()`` (see below) can also be defined to handle simple,\nbut not extended slices.) It is also recommended that mappings provide\nthe methods ``keys()``, ``values()``, ``items()``, ``has_key()``,\n``get()``, ``clear()``, ``setdefault()``, ``iterkeys()``,\n``itervalues()``, ``iteritems()``, ``pop()``, ``popitem()``,\n``copy()``, and ``update()`` behaving similar to those for Python\'s\nstandard dictionary objects. The ``UserDict`` module provides a\n``DictMixin`` class to help create those methods from a base set of\n``__getitem__()``, ``__setitem__()``, ``__delitem__()``, and\n``keys()``. Mutable sequences should provide methods ``append()``,\n``count()``, ``index()``, ``extend()``, ``insert()``, ``pop()``,\n``remove()``, ``reverse()`` and ``sort()``, like Python standard list\nobjects. Finally, sequence types should implement addition (meaning\nconcatenation) and multiplication (meaning repetition) by defining the\nmethods ``__add__()``, ``__radd__()``, ``__iadd__()``, ``__mul__()``,\n``__rmul__()`` and ``__imul__()`` described below; they should not\ndefine ``__coerce__()`` or other numerical operators. It is\nrecommended that both mappings and sequences implement the\n``__contains__()`` method to allow efficient use of the ``in``\noperator; for mappings, ``in`` should be equivalent of ``has_key()``;\nfor sequences, it should search through the values. It is further\nrecommended that both mappings and sequences implement the\n``__iter__()`` method to allow efficient iteration through the\ncontainer; for mappings, ``__iter__()`` should be the same as\n``iterkeys()``; for sequences, it should iterate through the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function ``len()``. Should return\n the length of the object, an integer ``>=`` 0. Also, an object\n that doesn\'t define a ``__nonzero__()`` method and whose\n ``__len__()`` method returns zero is considered to be false in a\n Boolean context.\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of ``self[key]``. For sequence\n types, the accepted keys should be integers and slice objects.\n Note that the special interpretation of negative indexes (if the\n class wishes to emulate a sequence type) is up to the\n ``__getitem__()`` method. If *key* is of an inappropriate type,\n ``TypeError`` may be raised; if of a value outside the set of\n indexes for the sequence (after any special interpretation of\n negative values), ``IndexError`` should be raised. For mapping\n types, if *key* is missing (not in the container), ``KeyError``\n should be raised.\n\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. 
The\n same exceptions should be raised for improper *key* values as for\n the ``__getitem__()`` method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of ``self[key]``. Same note as for\n ``__getitem__()``. This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the ``__getitem__()``\n method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method ``iterkeys()``.\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the ``reversed()`` built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the ``__reversed__()`` method is not provided, the\n ``reversed()`` built-in will fall back to using the sequence\n protocol (``__len__()`` and ``__getitem__()``). Objects that\n support the sequence protocol should only provide\n ``__reversed__()`` if they can provide an implementation that is\n more efficient than the one provided by ``reversed()``.\n\n New in version 2.6.\n\nThe membership test operators (``in`` and ``not in``) are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define ``__contains__()``, the membership\n test first tries iteration via ``__iter__()``, then the old\n sequence iteration protocol via ``__getitem__()``, see *this\n section in the language reference*.\n\n\nAdditional methods for emulation of sequence types\n==================================================\n\nThe following optional methods can be defined to further emulate\nsequence objects. Immutable sequences methods should at most only\ndefine ``__getslice__()``; mutable sequences might define all three\nmethods.\n\nobject.__getslice__(self, i, j)\n\n Deprecated since version 2.0: Support slice objects as parameters\n to the ``__getitem__()`` method. (However, built-in types in\n CPython currently still implement ``__getslice__()``. Therefore,\n you have to override it in derived classes when implementing\n slicing.)\n\n Called to implement evaluation of ``self[i:j]``. The returned\n object should be of the same type as *self*. Note that missing *i*\n or *j* in the slice expression are replaced by zero or\n ``sys.maxint``, respectively. If negative indexes are used in the\n slice, the length of the sequence is added to that index. If the\n instance does not implement the ``__len__()`` method, an\n ``AttributeError`` is raised. No guarantee is made that indexes\n adjusted this way are not still negative. 
Indexes which are\n greater than the length of the sequence are not modified. If no\n ``__getslice__()`` is found, a slice object is created instead, and\n passed to ``__getitem__()`` instead.\n\nobject.__setslice__(self, i, j, sequence)\n\n Called to implement assignment to ``self[i:j]``. Same notes for *i*\n and *j* as for ``__getslice__()``.\n\n This method is deprecated. If no ``__setslice__()`` is found, or\n for extended slicing of the form ``self[i:j:k]``, a slice object is\n created, and passed to ``__setitem__()``, instead of\n ``__setslice__()`` being called.\n\nobject.__delslice__(self, i, j)\n\n Called to implement deletion of ``self[i:j]``. Same notes for *i*\n and *j* as for ``__getslice__()``. This method is deprecated. If no\n ``__delslice__()`` is found, or for extended slicing of the form\n ``self[i:j:k]``, a slice object is created, and passed to\n ``__delitem__()``, instead of ``__delslice__()`` being called.\n\nNotice that these methods are only invoked when a single slice with a\nsingle colon is used, and the slice method is available. For slice\noperations involving extended slice notation, or in absence of the\nslice methods, ``__getitem__()``, ``__setitem__()`` or\n``__delitem__()`` is called with a slice object as argument.\n\nThe following example demonstrate how to make your program or module\ncompatible with earlier versions of Python (assuming that methods\n``__getitem__()``, ``__setitem__()`` and ``__delitem__()`` support\nslice objects as arguments):\n\n class MyClass:\n ...\n def __getitem__(self, index):\n ...\n def __setitem__(self, index, value):\n ...\n def __delitem__(self, index):\n ...\n\n if sys.version_info < (2, 0):\n # They won\'t be defined if version is at least 2.0 final\n\n def __getslice__(self, i, j):\n return self[max(0, i):max(0, j):]\n def __setslice__(self, i, j, seq):\n self[max(0, i):max(0, j):] = seq\n def __delslice__(self, i, j):\n del self[max(0, i):max(0, j):]\n ...\n\nNote the calls to ``max()``; these are necessary because of the\nhandling of negative indices before the ``__*slice__()`` methods are\ncalled. When negative indexes are used, the ``__*item__()`` methods\nreceive them as provided, but the ``__*slice__()`` methods get a\n"cooked" form of the index values. For each negative index value, the\nlength of the sequence is added to the index before calling the method\n(which may still result in a negative index); this is the customary\nhandling of negative indexes by the built-in sequence types, and the\n``__*item__()`` methods are expected to do this as well. However,\nsince they should already be doing that, negative indexes cannot be\npassed in; they must be constrained to the bounds of the sequence\nbefore being passed to the ``__*item__()`` methods. 
Calling ``max(0,\ni)`` conveniently returns the proper value.\n\n\nEmulating numeric types\n=======================\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``//``, ``%``, ``divmod()``,\n ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``, ``|``). For\n instance, to evaluate the expression ``x + y``, where *x* is an\n instance of a class that has an ``__add__()`` method,\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\n should not be related to ``__truediv__()`` (described below). Note\n that ``__pow__()`` should be defined to accept an optional third\n argument if the ternary version of the built-in ``pow()`` function\n is to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return ``NotImplemented``.\n\nobject.__div__(self, other)\nobject.__truediv__(self, other)\n\n The division operator (``/``) is implemented by these methods. The\n ``__truediv__()`` method is used when ``__future__.division`` is in\n effect, otherwise ``__div__()`` is used. If only one of these two\n methods is defined, the object will not support division in the\n alternate context; ``TypeError`` will be raised instead.\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rdiv__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations (``+``, ``-``, ``*``, ``/``, ``%``, ``divmod()``,\n ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``, ``|``) with\n reflected (swapped) operands. These functions are only called if\n the left operand does not support the corresponding operation and\n the operands are of different types. [2] For instance, to evaluate\n the expression ``x - y``, where *y* is an instance of a class that\n has an ``__rsub__()`` method, ``y.__rsub__(x)`` is called if\n ``x.__sub__(y)`` returns *NotImplemented*.\n\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\n (the coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left operand\'s\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand\'s\n non-reflected method. 
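  A minimal sketch of that rule, using hypothetical new-style classes:

     >>> class Base(object):
     ...     def __add__(self, other):
     ...         return 'Base.__add__'
     ...     def __radd__(self, other):
     ...         return 'Base.__radd__'
     ...
     >>> class Derived(Base):
     ...     def __radd__(self, other):
     ...         return 'Derived.__radd__'
     ...
     >>> Base() + Derived()      # Derived.__radd__ is tried first
     'Derived.__radd__'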
This behavior allows subclasses to\n override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__idiv__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\n should attempt to do the operation in-place (modifying *self*) and\n return the result (which could be, but does not have to be,\n *self*). If a specific method is not defined, the augmented\n assignment falls back to the normal methods. For instance, to\n execute the statement ``x += y``, where *x* is an instance of a\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\n called. If *x* is an instance of a class that does not define a\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\n considered, as with the evaluation of ``x + y``.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations (``-``, ``+``,\n ``abs()`` and ``~``).\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__long__(self)\nobject.__float__(self)\n\n Called to implement the built-in functions ``complex()``,\n ``int()``, ``long()``, and ``float()``. Should return a value of\n the appropriate type.\n\nobject.__oct__(self)\nobject.__hex__(self)\n\n Called to implement the built-in functions ``oct()`` and ``hex()``.\n Should return a string value.\n\nobject.__index__(self)\n\n Called to implement ``operator.index()``. Also called whenever\n Python needs an integer object (such as in slicing). Must return\n an integer (int or long).\n\n New in version 2.5.\n\nobject.__coerce__(self, other)\n\n Called to implement "mixed-mode" numeric arithmetic. Should either\n return a 2-tuple containing *self* and *other* converted to a\n common numeric type, or ``None`` if conversion is impossible. When\n the common type would be the type of ``other``, it is sufficient to\n return ``None``, since the interpreter will also ask the other\n object to attempt a coercion (but sometimes, if the implementation\n of the other type cannot be changed, it is useful to do the\n conversion to the other type here). A return value of\n ``NotImplemented`` is equivalent to returning ``None``.\n\n\nCoercion rules\n==============\n\nThis section used to document the rules for coercion. As the language\nhas evolved, the coercion rules have become hard to document\nprecisely; documenting what one version of one particular\nimplementation does is undesirable. Instead, here are some informal\nguidelines regarding coercion. In Python 3, coercion will not be\nsupported.\n\n* If the left operand of a % operator is a string or Unicode object,\n no coercion takes place and the string formatting operation is\n invoked instead.\n\n* It is no longer recommended to define a coercion operation. 
Mixed-\n mode operations on types that don\'t define coercion pass the\n original arguments to the operation.\n\n* New-style classes (those derived from ``object``) never invoke the\n ``__coerce__()`` method in response to a binary operator; the only\n time ``__coerce__()`` is invoked is when the built-in function\n ``coerce()`` is called.\n\n* For most intents and purposes, an operator that returns\n ``NotImplemented`` is treated the same as one that is not\n implemented at all.\n\n* Below, ``__op__()`` and ``__rop__()`` are used to signify the\n generic method names corresponding to an operator; ``__iop__()`` is\n used for the corresponding in-place operator. For example, for the\n operator \'``+``\', ``__add__()`` and ``__radd__()`` are used for the\n left and right variant of the binary operator, and ``__iadd__()``\n for the in-place variant.\n\n* For objects *x* and *y*, first ``x.__op__(y)`` is tried. If this is\n not implemented or returns ``NotImplemented``, ``y.__rop__(x)`` is\n tried. If this is also not implemented or returns\n ``NotImplemented``, a ``TypeError`` exception is raised. But see\n the following exception:\n\n* Exception to the previous item: if the left operand is an instance\n of a built-in type or a new-style class, and the right operand is an\n instance of a proper subclass of that type or class and overrides\n the base\'s ``__rop__()`` method, the right operand\'s ``__rop__()``\n method is tried *before* the left operand\'s ``__op__()`` method.\n\n This is done so that a subclass can completely override binary\n operators. Otherwise, the left operand\'s ``__op__()`` method would\n always accept the right operand: when an instance of a given class\n is expected, an instance of a subclass of that class is always\n acceptable.\n\n* When either operand type defines a coercion, this coercion is called\n before that type\'s ``__op__()`` or ``__rop__()`` method is called,\n but no sooner. If the coercion returns an object of a different\n type for the operand whose coercion is invoked, part of the process\n is redone using the new object.\n\n* When an in-place operator (like \'``+=``\') is used, if the left\n operand implements ``__iop__()``, it is invoked without any\n coercion. When the operation falls back to ``__op__()`` and/or\n ``__rop__()``, the normal coercion rules apply.\n\n* In ``x + y``, if *x* is a sequence that implements sequence\n concatenation, sequence concatenation is invoked.\n\n* In ``x * y``, if one operand is a sequence that implements sequence\n repetition, and the other is an integer (``int`` or ``long``),\n sequence repetition is invoked.\n\n* Rich comparisons (implemented by methods ``__eq__()`` and so on)\n never use coercion. Three-way comparison (implemented by\n ``__cmp__()``) does use coercion under the same conditions as other\n binary operations use it.\n\n* In the current implementation, the built-in numeric types ``int``,\n ``long``, ``float``, and ``complex`` do not use coercion. All these\n types implement a ``__coerce__()`` method, for use by the built-in\n ``coerce()`` function.\n\n Changed in version 2.7.\n\n\nWith Statement Context Managers\n===============================\n\nNew in version 2.5.\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a ``with`` statement. The context\nmanager handles the entry into, and the exit from, the desired runtime\ncontext for the execution of the block of code. 
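A minimal sketch of a context manager class, written against the ``__enter__()`` and ``__exit__()`` methods documented just below (the class name and printed messages are hypothetical; Python 2 syntax):

   class ManagedResource(object):
       def __enter__(self):
           print 'acquiring the resource'
           return self                      # bound to the target of the "as" clause
       def __exit__(self, exc_type, exc_value, traceback):
           print 'releasing the resource'
           return False                     # do not suppress exceptions

   with ManagedResource() as resource:
       print 'using the resource'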
Context managers are\nnormally invoked using the ``with`` statement (described in section\n*The with statement*), but can also be used by directly invoking their\nmethods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The ``with``\n statement will bind this method\'s return value to the target(s)\n specified in the ``as`` clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be ``None``.\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that ``__exit__()`` methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n\n\nSpecial method lookup for old-style classes\n===========================================\n\nFor old-style classes, special methods are always looked up in exactly\nthe same way as any other method or attribute. This is the case\nregardless of whether the method is being looked up explicitly as in\n``x.__getitem__(i)`` or implicitly as in ``x[i]``.\n\nThis behaviour means that special methods may exhibit different\nbehaviour for different instances of a single old-style class if the\nappropriate special attributes are set differently:\n\n >>> class C:\n ... pass\n ...\n >>> c1 = C()\n >>> c2 = C()\n >>> c1.__len__ = lambda: 5\n >>> c2.__len__ = lambda: 9\n >>> len(c1)\n 5\n >>> len(c2)\n 9\n\n\nSpecial method lookup for new-style classes\n===========================================\n\nFor new-style classes, implicit invocations of special methods are\nonly guaranteed to work correctly if defined on an object\'s type, not\nin the object\'s instance dictionary. That behaviour is the reason why\nthe following code raises an exception (unlike the equivalent example\nwith old-style classes):\n\n >>> class C(object):\n ... pass\n ...\n >>> c = C()\n >>> c.__len__ = lambda: 5\n >>> len(c)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: object of type \'C\' has no len()\n\nThe rationale behind this behaviour lies with a number of special\nmethods such as ``__hash__()`` and ``__repr__()`` that are implemented\nby all objects, including type objects. 
If the implicit lookup of\nthese methods used the conventional lookup process, they would fail\nwhen invoked on the type object itself:\n\n >>> 1 .__hash__() == hash(1)\n True\n >>> int.__hash__() == hash(int)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: descriptor \'__hash__\' of \'int\' object needs an argument\n\nIncorrectly attempting to invoke an unbound method of a class in this\nway is sometimes referred to as \'metaclass confusion\', and is avoided\nby bypassing the instance when looking up special methods:\n\n >>> type(1).__hash__(1) == hash(1)\n True\n >>> type(int).__hash__(int) == hash(int)\n True\n\nIn addition to bypassing any instance attributes in the interest of\ncorrectness, implicit special method lookup generally also bypasses\nthe ``__getattribute__()`` method even of the object\'s metaclass:\n\n >>> class Meta(type):\n ... def __getattribute__(*args):\n ... print "Metaclass getattribute invoked"\n ... return type.__getattribute__(*args)\n ...\n >>> class C(object):\n ... __metaclass__ = Meta\n ... def __len__(self):\n ... return 10\n ... def __getattribute__(*args):\n ... print "Class getattribute invoked"\n ... return object.__getattribute__(*args)\n ...\n >>> c = C()\n >>> c.__len__() # Explicit lookup via instance\n Class getattribute invoked\n 10\n >>> type(c).__len__(c) # Explicit lookup via type\n Metaclass getattribute invoked\n 10\n >>> len(c) # Implicit lookup\n 10\n\nBypassing the ``__getattribute__()`` machinery in this fashion\nprovides significant scope for speed optimisations within the\ninterpreter, at the cost of some flexibility in the handling of\nspecial methods (the special method *must* be set on the class object\nitself in order to be consistently invoked by the interpreter).\n\n-[ Footnotes ]-\n\n[1] It *is* possible in some cases to change an object\'s type, under\n certain controlled conditions. It generally isn\'t a good idea\n though, since it can lead to some very strange behaviour if it is\n handled incorrectly.\n\n[2] For operands of the same type, it is assumed that if the non-\n reflected method (such as ``__add__()``) fails the operation is\n not supported, which is why the reflected method is not called.\n', - 'string-methods': '\nString Methods\n**************\n\nBelow are listed the string methods which both 8-bit strings and\nUnicode objects support. Some of them are also available on\n``bytearray`` objects.\n\nIn addition, Python\'s strings support the sequence type methods\ndescribed in the *Sequence Types --- str, unicode, list, tuple,\nbytearray, buffer, xrange* section. To output formatted strings use\ntemplate strings or the ``%`` operator described in the *String\nFormatting Operations* section. Also, see the ``re`` module for string\nfunctions based on regular expressions.\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.center(width[, fillchar])\n\n Return centered in a string of length *width*. Padding is done\n using the specified *fillchar* (default is a space).\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. 
Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.decode([encoding[, errors]])\n\n Decodes the string using the codec registered for *encoding*.\n *encoding* defaults to the default string encoding. *errors* may\n be given to set a different error handling scheme. The default is\n ``\'strict\'``, meaning that encoding errors raise ``UnicodeError``.\n Other possible values are ``\'ignore\'``, ``\'replace\'`` and any other\n name registered via ``codecs.register_error()``, see section *Codec\n Base Classes*.\n\n New in version 2.2.\n\n Changed in version 2.3: Support for other error handling schemes\n added.\n\n Changed in version 2.7: Support for keyword arguments added.\n\nstr.encode([encoding[, errors]])\n\n Return an encoded version of the string. Default encoding is the\n current default string encoding. *errors* may be given to set a\n different error handling scheme. The default for *errors* is\n ``\'strict\'``, meaning that encoding errors raise a\n ``UnicodeError``. Other possible values are ``\'ignore\'``,\n ``\'replace\'``, ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and\n any other name registered via ``codecs.register_error()``, see\n section *Codec Base Classes*. For a list of possible encodings, see\n section *Standard Encodings*.\n\n New in version 2.0.\n\n Changed in version 2.3: Support for ``\'xmlcharrefreplace\'`` and\n ``\'backslashreplace\'`` and other error handling schemes added.\n\n Changed in version 2.7: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\n Changed in version 2.5: Accept tuples as *suffix*.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by one or more spaces, depending on the current column and the\n given tab size. The column number is reset to zero after each\n newline occurring in the string. If *tabsize* is not given, a tab\n size of ``8`` characters is assumed. This doesn\'t understand other\n non-printing characters or escape sequences.\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. 
Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\n This method of string formatting is the new standard in Python 3,\n and should be preferred to the ``%`` formatting described in\n *String Formatting Operations* in new code.\n\n New in version 2.6.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. The separator between elements is the\n string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. 
If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\n New in version 2.5.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\n New in version 2.5.\n\nstr.rsplit([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\n New in version 2.4.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.split([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). If *maxsplit* is not specified or ``-1``, then there is\n no limit on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. 
Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. This method uses the *universal newlines* approach to\n splitting lines. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\n For example, ``\'ab c\\n\\nde fg\\rkl\\r\\n\'.splitlines()`` returns\n ``[\'ab c\', \'\', \'de fg\', \'kl\']``, while the same call with\n ``splitlines(True)`` returns ``[\'ab c\\n\', \'\\n\', \'de fg\\r\',\n \'kl\\r\\n\']``.\n\n Unlike ``split()`` when a delimiter string *sep* is given, this\n method returns an empty list for the empty string, and a terminal\n line break does not result in an extra line.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\n Changed in version 2.5: Accept tuples as *prefix*.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.translate(table[, deletechars])\n\n Return a copy of the string where all characters occurring in the\n optional argument *deletechars* are removed, and the remaining\n characters have been mapped through the given translation table,\n which must be a string of length 256.\n\n You can use the ``maketrans()`` helper function in the ``string``\n module to create a translation table. 
For string objects, set the\n *table* argument to ``None`` for translations that only delete\n characters:\n\n >>> \'read this short text\'.translate(None, \'aeiou\')\n \'rd ths shrt txt\'\n\n New in version 2.6: Support for a ``None`` *table* argument.\n\n For Unicode objects, the ``translate()`` method does not accept the\n optional *deletechars* argument. Instead, it returns a copy of the\n *s* where all characters have been mapped through the given\n translation table which must be a mapping of Unicode ordinals to\n Unicode ordinals, Unicode strings or ``None``. Unmapped characters\n are left untouched. Characters mapped to ``None`` are deleted.\n Note, a more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see ``encodings.cp1251``\n for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n\n New in version 2.2.2.\n\nThe following methods are present only on unicode objects:\n\nunicode.isnumeric()\n\n Return ``True`` if there are only numeric characters in S,\n ``False`` otherwise. Numeric characters include digit characters,\n and all characters that have the Unicode numeric value property,\n e.g. U+2155, VULGAR FRACTION ONE FIFTH.\n\nunicode.isdecimal()\n\n Return ``True`` if there are only decimal characters in S,\n ``False`` otherwise. Decimal characters include digit characters,\n and all characters that can be used to form decimal-radix numbers,\n e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n', + 'string-methods': '\nString Methods\n**************\n\nBelow are listed the string methods which both 8-bit strings and\nUnicode objects support. Some of them are also available on\n``bytearray`` objects.\n\nIn addition, Python\'s strings support the sequence type methods\ndescribed in the *Sequence Types --- str, unicode, list, tuple,\nbytearray, buffer, xrange* section. To output formatted strings use\ntemplate strings or the ``%`` operator described in the *String\nFormatting Operations* section. Also, see the ``re`` module for string\nfunctions based on regular expressions.\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.center(width[, fillchar])\n\n Return centered in a string of length *width*. Padding is done\n using the specified *fillchar* (default is a space).\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.decode([encoding[, errors]])\n\n Decodes the string using the codec registered for *encoding*.\n *encoding* defaults to the default string encoding. *errors* may\n be given to set a different error handling scheme. 
The default is\n ``\'strict\'``, meaning that encoding errors raise ``UnicodeError``.\n Other possible values are ``\'ignore\'``, ``\'replace\'`` and any other\n name registered via ``codecs.register_error()``, see section *Codec\n Base Classes*.\n\n New in version 2.2.\n\n Changed in version 2.3: Support for other error handling schemes\n added.\n\n Changed in version 2.7: Support for keyword arguments added.\n\nstr.encode([encoding[, errors]])\n\n Return an encoded version of the string. Default encoding is the\n current default string encoding. *errors* may be given to set a\n different error handling scheme. The default for *errors* is\n ``\'strict\'``, meaning that encoding errors raise a\n ``UnicodeError``. Other possible values are ``\'ignore\'``,\n ``\'replace\'``, ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and\n any other name registered via ``codecs.register_error()``, see\n section *Codec Base Classes*. For a list of possible encodings, see\n section *Standard Encodings*.\n\n New in version 2.0.\n\n Changed in version 2.3: Support for ``\'xmlcharrefreplace\'`` and\n ``\'backslashreplace\'`` and other error handling schemes added.\n\n Changed in version 2.7: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\n Changed in version 2.5: Accept tuples as *suffix*.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by one or more spaces, depending on the current column and the\n given tab size. Tab positions occur every *tabsize* characters\n (default is 8, giving tab positions at columns 0, 8, 16 and so on).\n To expand the string, the current column is set to zero and the\n string is examined character by character. If the character is a\n tab (``\\t``), one or more space characters are inserted in the\n result until the current column is equal to the next tab position.\n (The tab character itself is not copied.) If the character is a\n newline (``\\n``) or return (``\\r``), it is copied and the current\n column is reset to zero. Any other character is copied unchanged\n and the current column is incremented by one regardless of how the\n character is represented when printed.\n\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs()\n \'01 012 0123 01234\'\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs(4)\n \'01 012 0123 01234\'\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. 
Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\n This method of string formatting is the new standard in Python 3,\n and should be preferred to the ``%`` formatting described in\n *String Formatting Operations* in new code.\n\n New in version 2.6.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. The separator between elements is the\n string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. 
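   For example, with an arbitrary illustrative string:

      >>> 'user=guido'.partition('=')
      ('user', '=', 'guido')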
If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\n New in version 2.5.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\n New in version 2.5.\n\nstr.rsplit([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\n New in version 2.4.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.split([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). If *maxsplit* is not specified or ``-1``, then there is\n no limit on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. 
Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. This method uses the *universal newlines* approach to\n splitting lines. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\n For example, ``\'ab c\\n\\nde fg\\rkl\\r\\n\'.splitlines()`` returns\n ``[\'ab c\', \'\', \'de fg\', \'kl\']``, while the same call with\n ``splitlines(True)`` returns ``[\'ab c\\n\', \'\\n\', \'de fg\\r\',\n \'kl\\r\\n\']``.\n\n Unlike ``split()`` when a delimiter string *sep* is given, this\n method returns an empty list for the empty string, and a terminal\n line break does not result in an extra line.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\n Changed in version 2.5: Accept tuples as *prefix*.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.translate(table[, deletechars])\n\n Return a copy of the string where all characters occurring in the\n optional argument *deletechars* are removed, and the remaining\n characters have been mapped through the given translation table,\n which must be a string of length 256.\n\n You can use the ``maketrans()`` helper function in the ``string``\n module to create a translation table. 
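   For instance, with illustrative values:

      >>> import string
      >>> table = string.maketrans('abc', 'xyz')
      >>> 'abcabc'.translate(table)
      'xyzxyz'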
For string objects, set the\n *table* argument to ``None`` for translations that only delete\n characters:\n\n >>> \'read this short text\'.translate(None, \'aeiou\')\n \'rd ths shrt txt\'\n\n New in version 2.6: Support for a ``None`` *table* argument.\n\n For Unicode objects, the ``translate()`` method does not accept the\n optional *deletechars* argument. Instead, it returns a copy of the\n *s* where all characters have been mapped through the given\n translation table which must be a mapping of Unicode ordinals to\n Unicode ordinals, Unicode strings or ``None``. Unmapped characters\n are left untouched. Characters mapped to ``None`` are deleted.\n Note, a more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see ``encodings.cp1251``\n for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n\n New in version 2.2.2.\n\nThe following methods are present only on unicode objects:\n\nunicode.isnumeric()\n\n Return ``True`` if there are only numeric characters in S,\n ``False`` otherwise. Numeric characters include digit characters,\n and all characters that have the Unicode numeric value property,\n e.g. U+2155, VULGAR FRACTION ONE FIFTH.\n\nunicode.isdecimal()\n\n Return ``True`` if there are only decimal characters in S,\n ``False`` otherwise. Decimal characters include digit characters,\n and all characters that can be used to form decimal-radix numbers,\n e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n', 'strings': '\nString literals\n***************\n\nString literals are described by the following lexical definitions:\n\n stringliteral ::= [stringprefix](shortstring | longstring)\n stringprefix ::= "r" | "u" | "ur" | "R" | "U" | "UR" | "Ur" | "uR"\n | "b" | "B" | "br" | "Br" | "bR" | "BR"\n shortstring ::= "\'" shortstringitem* "\'" | \'"\' shortstringitem* \'"\'\n longstring ::= "\'\'\'" longstringitem* "\'\'\'"\n | \'"""\' longstringitem* \'"""\'\n shortstringitem ::= shortstringchar | escapeseq\n longstringitem ::= longstringchar | escapeseq\n shortstringchar ::= \n longstringchar ::= \n escapeseq ::= "\\" \n\nOne syntactic restriction not indicated by these productions is that\nwhitespace is not allowed between the ``stringprefix`` and the rest of\nthe string literal. The source character set is defined by the\nencoding declaration; it is ASCII if no encoding declaration is given\nin the source file; see section *Encoding declarations*.\n\nIn plain English: String literals can be enclosed in matching single\nquotes (``\'``) or double quotes (``"``). They can also be enclosed in\nmatching groups of three single or double quotes (these are generally\nreferred to as *triple-quoted strings*). 
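For example, a triple-quoted literal may span several source lines (the text is arbitrary):

   >>> s = """one
   ... two"""
   >>> s
   'one\ntwo'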
The backslash (``\\``)\ncharacter is used to escape characters that otherwise have a special\nmeaning, such as newline, backslash itself, or the quote character.\nString literals may optionally be prefixed with a letter ``\'r\'`` or\n``\'R\'``; such strings are called *raw strings* and use different rules\nfor interpreting backslash escape sequences. A prefix of ``\'u\'`` or\n``\'U\'`` makes the string a Unicode string. Unicode strings use the\nUnicode character set as defined by the Unicode Consortium and ISO\n10646. Some additional escape sequences, described below, are\navailable in Unicode strings. A prefix of ``\'b\'`` or ``\'B\'`` is\nignored in Python 2; it indicates that the literal should become a\nbytes literal in Python 3 (e.g. when code is automatically converted\nwith 2to3). A ``\'u\'`` or ``\'b\'`` prefix may be followed by an ``\'r\'``\nprefix.\n\nIn triple-quoted strings, unescaped newlines and quotes are allowed\n(and are retained), except that three unescaped quotes in a row\nterminate the string. (A "quote" is the character used to open the\nstring, i.e. either ``\'`` or ``"``.)\n\nUnless an ``\'r\'`` or ``\'R\'`` prefix is present, escape sequences in\nstrings are interpreted according to rules similar to those used by\nStandard C. The recognized escape sequences are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| ``\\newline`` | Ignored | |\n+-------------------+-----------------------------------+---------+\n| ``\\\\`` | Backslash (``\\``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\\'`` | Single quote (``\'``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\"`` | Double quote (``"``) | |\n+-------------------+-----------------------------------+---------+\n| ``\\a`` | ASCII Bell (BEL) | |\n+-------------------+-----------------------------------+---------+\n| ``\\b`` | ASCII Backspace (BS) | |\n+-------------------+-----------------------------------+---------+\n| ``\\f`` | ASCII Formfeed (FF) | |\n+-------------------+-----------------------------------+---------+\n| ``\\n`` | ASCII Linefeed (LF) | |\n+-------------------+-----------------------------------+---------+\n| ``\\N{name}`` | Character named *name* in the | |\n| | Unicode database (Unicode only) | |\n+-------------------+-----------------------------------+---------+\n| ``\\r`` | ASCII Carriage Return (CR) | |\n+-------------------+-----------------------------------+---------+\n| ``\\t`` | ASCII Horizontal Tab (TAB) | |\n+-------------------+-----------------------------------+---------+\n| ``\\uxxxx`` | Character with 16-bit hex value | (1) |\n| | *xxxx* (Unicode only) | |\n+-------------------+-----------------------------------+---------+\n| ``\\Uxxxxxxxx`` | Character with 32-bit hex value | (2) |\n| | *xxxxxxxx* (Unicode only) | |\n+-------------------+-----------------------------------+---------+\n| ``\\v`` | ASCII Vertical Tab (VT) | |\n+-------------------+-----------------------------------+---------+\n| ``\\ooo`` | Character with octal value *ooo* | (3,5) |\n+-------------------+-----------------------------------+---------+\n| ``\\xhh`` | Character with hex value *hh* | (4,5) |\n+-------------------+-----------------------------------+---------+\n\nNotes:\n\n1. Individual code units which form parts of a surrogate pair can be\n encoded using this escape sequence.\n\n2. 
Any Unicode character can be encoded this way, but characters\n outside the Basic Multilingual Plane (BMP) will be encoded using a\n surrogate pair if Python is compiled to use 16-bit code units (the\n default). Individual code units which form parts of a surrogate\n pair can be encoded using this escape sequence.\n\n3. As in Standard C, up to three octal digits are accepted.\n\n4. Unlike in Standard C, exactly two hex digits are required.\n\n5. In a string literal, hexadecimal and octal escapes denote the byte\n with the given value; it is not necessary that the byte encodes a\n character in the source character set. In a Unicode literal, these\n escapes denote a Unicode character with the given value.\n\nUnlike Standard C, all unrecognized escape sequences are left in the\nstring unchanged, i.e., *the backslash is left in the string*. (This\nbehavior is useful when debugging: if an escape sequence is mistyped,\nthe resulting output is more easily recognized as broken.) It is also\nimportant to note that the escape sequences marked as "(Unicode only)"\nin the table above fall into the category of unrecognized escapes for\nnon-Unicode string literals.\n\nWhen an ``\'r\'`` or ``\'R\'`` prefix is present, a character following a\nbackslash is included in the string without change, and *all\nbackslashes are left in the string*. For example, the string literal\n``r"\\n"`` consists of two characters: a backslash and a lowercase\n``\'n\'``. String quotes can be escaped with a backslash, but the\nbackslash remains in the string; for example, ``r"\\""`` is a valid\nstring literal consisting of two characters: a backslash and a double\nquote; ``r"\\"`` is not a valid string literal (even a raw string\ncannot end in an odd number of backslashes). Specifically, *a raw\nstring cannot end in a single backslash* (since the backslash would\nescape the following quote character). Note also that a single\nbackslash followed by a newline is interpreted as those two characters\nas part of the string, *not* as a line continuation.\n\nWhen an ``\'r\'`` or ``\'R\'`` prefix is used in conjunction with a\n``\'u\'`` or ``\'U\'`` prefix, then the ``\\uXXXX`` and ``\\UXXXXXXXX``\nescape sequences are processed while *all other backslashes are left\nin the string*. For example, the string literal ``ur"\\u0062\\n"``\nconsists of three Unicode characters: \'LATIN SMALL LETTER B\', \'REVERSE\nSOLIDUS\', and \'LATIN SMALL LETTER N\'. Backslashes can be escaped with\na preceding backslash; however, both remain in the string. As a\nresult, ``\\uXXXX`` escape sequences are only recognized when there are\nan odd number of backslashes.\n', 'subscriptions': '\nSubscriptions\n*************\n\nA subscription selects an item of a sequence (string, tuple or list)\nor mapping (dictionary) object:\n\n subscription ::= primary "[" expression_list "]"\n\nThe primary must evaluate to an object of a sequence or mapping type.\n\nIf the primary is a mapping, the expression list must evaluate to an\nobject whose value is one of the keys of the mapping, and the\nsubscription selects the value in the mapping that corresponds to that\nkey. (The expression list is a tuple except if it has exactly one\nitem.)\n\nIf the primary is a sequence, the expression (list) must evaluate to a\nplain integer. If this value is negative, the length of the sequence\nis added to it (so that, e.g., ``x[-1]`` selects the last item of\n``x``.) 
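A short sketch of the negative-index rule just described (illustrative session; standard sequence semantics assumed):

   >>> x = ['a', 'b', 'c']
   >>> x[-1]            # len(x) is added to -1, giving x[2]
   'c'
   >>> x[len(x) - 1]
   'c'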
The resulting value must be a nonnegative integer less than\nthe number of items in the sequence, and the subscription selects the\nitem whose index is that value (counting from zero).\n\nA string\'s items are characters. A character is not a separate data\ntype but a string of exactly one character.\n', 'truth': "\nTruth Value Testing\n*******************\n\nAny object can be tested for truth value, for use in an ``if`` or\n``while`` condition or as operand of the Boolean operations below. The\nfollowing values are considered false:\n\n* ``None``\n\n* ``False``\n\n* zero of any numeric type, for example, ``0``, ``0L``, ``0.0``,\n ``0j``.\n\n* any empty sequence, for example, ``''``, ``()``, ``[]``.\n\n* any empty mapping, for example, ``{}``.\n\n* instances of user-defined classes, if the class defines a\n ``__nonzero__()`` or ``__len__()`` method, when that method returns\n the integer zero or ``bool`` value ``False``. [1]\n\nAll other values are considered true --- so objects of many types are\nalways true.\n\nOperations and built-in functions that have a Boolean result always\nreturn ``0`` or ``False`` for false and ``1`` or ``True`` for true,\nunless otherwise stated. (Important exception: the Boolean operations\n``or`` and ``and`` always return one of their operands.)\n", 'try': '\nThe ``try`` statement\n*********************\n\nThe ``try`` statement specifies exception handlers and/or cleanup code\nfor a group of statements:\n\n try_stmt ::= try1_stmt | try2_stmt\n try1_stmt ::= "try" ":" suite\n ("except" [expression [("as" | ",") target]] ":" suite)+\n ["else" ":" suite]\n ["finally" ":" suite]\n try2_stmt ::= "try" ":" suite\n "finally" ":" suite\n\nChanged in version 2.5: In previous versions of Python,\n``try``...``except``...``finally`` did not work. ``try``...``except``\nhad to be nested in ``try``...``finally``.\n\nThe ``except`` clause(s) specify one or more exception handlers. When\nno exception occurs in the ``try`` clause, no exception handler is\nexecuted. When an exception occurs in the ``try`` suite, a search for\nan exception handler is started. This search inspects the except\nclauses in turn until one is found that matches the exception. An\nexpression-less except clause, if present, must be last; it matches\nany exception. For an except clause with an expression, that\nexpression is evaluated, and the clause matches the exception if the\nresulting object is "compatible" with the exception. An object is\ncompatible with an exception if it is the class or a base class of the\nexception object, or a tuple containing an item compatible with the\nexception.\n\nIf no except clause matches the exception, the search for an exception\nhandler continues in the surrounding code and on the invocation stack.\n[1]\n\nIf the evaluation of an expression in the header of an except clause\nraises an exception, the original search for a handler is canceled and\na search starts for the new exception in the surrounding code and on\nthe call stack (it is treated as if the entire ``try`` statement\nraised the exception).\n\nWhen a matching except clause is found, the exception is assigned to\nthe target specified in that except clause, if present, and the except\nclause\'s suite is executed. All except clauses must have an\nexecutable block. When the end of this block is reached, execution\ncontinues normally after the entire try statement. 
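A minimal sketch of except-clause matching (assumed session; the ``as`` form of the target requires Python 2.6 or later):

   >>> try:
   ...     1 / 0
   ... except ZeroDivisionError as exc:
   ...     print 'handled:', exc
   ...
   handled: integer division or modulo by zero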
(This means that\nif two nested handlers exist for the same exception, and the exception\noccurs in the try clause of the inner handler, the outer handler will\nnot handle the exception.)\n\nBefore an except clause\'s suite is executed, details about the\nexception are assigned to three variables in the ``sys`` module:\n``sys.exc_type`` receives the object identifying the exception;\n``sys.exc_value`` receives the exception\'s parameter;\n``sys.exc_traceback`` receives a traceback object (see section *The\nstandard type hierarchy*) identifying the point in the program where\nthe exception occurred. These details are also available through the\n``sys.exc_info()`` function, which returns a tuple ``(exc_type,\nexc_value, exc_traceback)``. Use of the corresponding variables is\ndeprecated in favor of this function, since their use is unsafe in a\nthreaded program. As of Python 1.5, the variables are restored to\ntheir previous values (before the call) when returning from a function\nthat handled an exception.\n\nThe optional ``else`` clause is executed if and when control flows off\nthe end of the ``try`` clause. [2] Exceptions in the ``else`` clause\nare not handled by the preceding ``except`` clauses.\n\nIf ``finally`` is present, it specifies a \'cleanup\' handler. The\n``try`` clause is executed, including any ``except`` and ``else``\nclauses. If an exception occurs in any of the clauses and is not\nhandled, the exception is temporarily saved. The ``finally`` clause is\nexecuted. If there is a saved exception, it is re-raised at the end\nof the ``finally`` clause. If the ``finally`` clause raises another\nexception or executes a ``return`` or ``break`` statement, the saved\nexception is discarded:\n\n def f():\n try:\n 1/0\n finally:\n return 42\n\n >>> f()\n 42\n\nThe exception information is not available to the program during\nexecution of the ``finally`` clause.\n\nWhen a ``return``, ``break`` or ``continue`` statement is executed in\nthe ``try`` suite of a ``try``...``finally`` statement, the\n``finally`` clause is also executed \'on the way out.\' A ``continue``\nstatement is illegal in the ``finally`` clause. (The reason is a\nproblem with the current implementation --- this restriction may be\nlifted in the future).\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information on using the ``raise`` statement to\ngenerate exceptions may be found in section *The raise statement*.\n', - 'types': '\nThe standard type hierarchy\n***************************\n\nBelow is a list of the types that are built into Python. Extension\nmodules (written in C, Java, or other languages, depending on the\nimplementation) can define additional types. Future versions of\nPython may add types to the type hierarchy (e.g., rational numbers,\nefficiently stored arrays of integers, etc.).\n\nSome of the type descriptions below contain a paragraph listing\n\'special attributes.\' These are attributes that provide access to the\nimplementation and are not intended for general use. Their definition\nmay change in the future.\n\nNone\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name ``None``.\n It is used to signify the absence of a value in many situations,\n e.g., it is returned from functions that don\'t explicitly return\n anything. Its truth value is false.\n\nNotImplemented\n This type has a single value. There is a single object with this\n value. 
This object is accessed through the built-in name\n ``NotImplemented``. Numeric methods and rich comparison methods may\n return this value if they do not implement the operation for the\n operands provided. (The interpreter will then try the reflected\n operation, or some other fallback, depending on the operator.) Its\n truth value is true.\n\nEllipsis\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name\n ``Ellipsis``. It is used to indicate the presence of the ``...``\n syntax in a slice. Its truth value is true.\n\n``numbers.Number``\n These are created by numeric literals and returned as results by\n arithmetic operators and arithmetic built-in functions. Numeric\n objects are immutable; once created their value never changes.\n Python numbers are of course strongly related to mathematical\n numbers, but subject to the limitations of numerical representation\n in computers.\n\n Python distinguishes between integers, floating point numbers, and\n complex numbers:\n\n ``numbers.Integral``\n These represent elements from the mathematical set of integers\n (positive and negative).\n\n There are three types of integers:\n\n Plain integers\n These represent numbers in the range -2147483648 through\n 2147483647. (The range may be larger on machines with a\n larger natural word size, but not smaller.) When the result\n of an operation would fall outside this range, the result is\n normally returned as a long integer (in some cases, the\n exception ``OverflowError`` is raised instead). For the\n purpose of shift and mask operations, integers are assumed to\n have a binary, 2\'s complement notation using 32 or more bits,\n and hiding no bits from the user (i.e., all 4294967296\n different bit patterns correspond to different values).\n\n Long integers\n These represent numbers in an unlimited range, subject to\n available (virtual) memory only. For the purpose of shift\n and mask operations, a binary representation is assumed, and\n negative numbers are represented in a variant of 2\'s\n complement which gives the illusion of an infinite string of\n sign bits extending to the left.\n\n Booleans\n These represent the truth values False and True. The two\n objects representing the values False and True are the only\n Boolean objects. The Boolean type is a subtype of plain\n integers, and Boolean values behave like the values 0 and 1,\n respectively, in almost all contexts, the exception being\n that when converted to a string, the strings ``"False"`` or\n ``"True"`` are returned, respectively.\n\n The rules for integer representation are intended to give the\n most meaningful interpretation of shift and mask operations\n involving negative integers and the least surprises when\n switching between the plain and long integer domains. Any\n operation, if it yields a result in the plain integer domain,\n will yield the same result in the long integer domain or when\n using mixed operands. The switch between domains is transparent\n to the programmer.\n\n ``numbers.Real`` (``float``)\n These represent machine-level double precision floating point\n numbers. You are at the mercy of the underlying machine\n architecture (and C or Java implementation) for the accepted\n range and handling of overflow. 
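A brief session illustrating the integer behaviour described above: a result outside the plain-integer range becomes a long, and Booleans behave as a subtype of plain integers (assumed CPython 2.x output):

   >>> type(2 ** 100)             # result leaves the plain integer range
   <type 'long'>
   >>> isinstance(True, int)      # bool is a subtype of int
   True
   >>> True + True
   2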
Python does not support single-\n precision floating point numbers; the savings in processor and\n memory usage that are usually the reason for using these is\n dwarfed by the overhead of using objects in Python, so there is\n no reason to complicate the language with two kinds of floating\n point numbers.\n\n ``numbers.Complex``\n These represent complex numbers as a pair of machine-level\n double precision floating point numbers. The same caveats apply\n as for floating point numbers. The real and imaginary parts of a\n complex number ``z`` can be retrieved through the read-only\n attributes ``z.real`` and ``z.imag``.\n\nSequences\n These represent finite ordered sets indexed by non-negative\n numbers. The built-in function ``len()`` returns the number of\n items of a sequence. When the length of a sequence is *n*, the\n index set contains the numbers 0, 1, ..., *n*-1. Item *i* of\n sequence *a* is selected by ``a[i]``.\n\n Sequences also support slicing: ``a[i:j]`` selects all items with\n index *k* such that *i* ``<=`` *k* ``<`` *j*. When used as an\n expression, a slice is a sequence of the same type. This implies\n that the index set is renumbered so that it starts at 0.\n\n Some sequences also support "extended slicing" with a third "step"\n parameter: ``a[i:j:k]`` selects all items of *a* with index *x*\n where ``x = i + n*k``, *n* ``>=`` ``0`` and *i* ``<=`` *x* ``<``\n *j*.\n\n Sequences are distinguished according to their mutability:\n\n Immutable sequences\n An object of an immutable sequence type cannot change once it is\n created. (If the object contains references to other objects,\n these other objects may be mutable and may be changed; however,\n the collection of objects directly referenced by an immutable\n object cannot change.)\n\n The following types are immutable sequences:\n\n Strings\n The items of a string are characters. There is no separate\n character type; a character is represented by a string of one\n item. Characters represent (at least) 8-bit bytes. The\n built-in functions ``chr()`` and ``ord()`` convert between\n characters and nonnegative integers representing the byte\n values. Bytes with the values 0-127 usually represent the\n corresponding ASCII values, but the interpretation of values\n is up to the program. The string data type is also used to\n represent arrays of bytes, e.g., to hold data read from a\n file.\n\n (On systems whose native character set is not ASCII, strings\n may use EBCDIC in their internal representation, provided the\n functions ``chr()`` and ``ord()`` implement a mapping between\n ASCII and EBCDIC, and string comparison preserves the ASCII\n order. Or perhaps someone can propose a better rule?)\n\n Unicode\n The items of a Unicode object are Unicode code units. A\n Unicode code unit is represented by a Unicode object of one\n item and can hold either a 16-bit or 32-bit value\n representing a Unicode ordinal (the maximum value for the\n ordinal is given in ``sys.maxunicode``, and depends on how\n Python is configured at compile time). Surrogate pairs may\n be present in the Unicode object, and will be reported as two\n separate items. The built-in functions ``unichr()`` and\n ``ord()`` convert between code units and nonnegative integers\n representing the Unicode ordinals as defined in the Unicode\n Standard 3.0. Conversion from and to other encodings are\n possible through the Unicode method ``encode()`` and the\n built-in function ``unicode()``.\n\n Tuples\n The items of a tuple are arbitrary Python objects. 
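The slicing notation described above, as a small assumed interpreter session:

   >>> a = [0, 1, 2, 3, 4, 5]
   >>> a[1:4]                     # items with index 1 <= k < 4
   [1, 2, 3]
   >>> a[0:6:2]                   # extended slicing with step 2
   [0, 2, 4]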
Tuples of\n two or more items are formed by comma-separated lists of\n expressions. A tuple of one item (a \'singleton\') can be\n formed by affixing a comma to an expression (an expression by\n itself does not create a tuple, since parentheses must be\n usable for grouping of expressions). An empty tuple can be\n formed by an empty pair of parentheses.\n\n Mutable sequences\n Mutable sequences can be changed after they are created. The\n subscription and slicing notations can be used as the target of\n assignment and ``del`` (delete) statements.\n\n There are currently two intrinsic mutable sequence types:\n\n Lists\n The items of a list are arbitrary Python objects. Lists are\n formed by placing a comma-separated list of expressions in\n square brackets. (Note that there are no special cases needed\n to form lists of length 0 or 1.)\n\n Byte Arrays\n A bytearray object is a mutable array. They are created by\n the built-in ``bytearray()`` constructor. Aside from being\n mutable (and hence unhashable), byte arrays otherwise provide\n the same interface and functionality as immutable bytes\n objects.\n\n The extension module ``array`` provides an additional example of\n a mutable sequence type.\n\nSet types\n These represent unordered, finite sets of unique, immutable\n objects. As such, they cannot be indexed by any subscript. However,\n they can be iterated over, and the built-in function ``len()``\n returns the number of items in a set. Common uses for sets are fast\n membership testing, removing duplicates from a sequence, and\n computing mathematical operations such as intersection, union,\n difference, and symmetric difference.\n\n For set elements, the same immutability rules apply as for\n dictionary keys. Note that numeric types obey the normal rules for\n numeric comparison: if two numbers compare equal (e.g., ``1`` and\n ``1.0``), only one of them can be contained in a set.\n\n There are currently two intrinsic set types:\n\n Sets\n These represent a mutable set. They are created by the built-in\n ``set()`` constructor and can be modified afterwards by several\n methods, such as ``add()``.\n\n Frozen sets\n These represent an immutable set. They are created by the\n built-in ``frozenset()`` constructor. As a frozenset is\n immutable and *hashable*, it can be used again as an element of\n another set, or as a dictionary key.\n\nMappings\n These represent finite sets of objects indexed by arbitrary index\n sets. The subscript notation ``a[k]`` selects the item indexed by\n ``k`` from the mapping ``a``; this can be used in expressions and\n as the target of assignments or ``del`` statements. The built-in\n function ``len()`` returns the number of items in a mapping.\n\n There is currently a single intrinsic mapping type:\n\n Dictionaries\n These represent finite sets of objects indexed by nearly\n arbitrary values. 
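A small sketch of the set rules noted above: numbers that compare equal collapse to a single element, and a frozenset, being hashable, can itself be a set element (assumed CPython 2.x session):

   >>> set([1, 1.0, 2])
   set([1, 2])
   >>> frozenset([1, 2]) in set([frozenset([1, 2])])
   True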
The only types of values not acceptable as\n keys are values containing lists or dictionaries or other\n mutable types that are compared by value rather than by object\n identity, the reason being that the efficient implementation of\n dictionaries requires a key\'s hash value to remain constant.\n Numeric types used for keys obey the normal rules for numeric\n comparison: if two numbers compare equal (e.g., ``1`` and\n ``1.0``) then they can be used interchangeably to index the same\n dictionary entry.\n\n Dictionaries are mutable; they can be created by the ``{...}``\n notation (see section *Dictionary displays*).\n\n The extension modules ``dbm``, ``gdbm``, and ``bsddb`` provide\n additional examples of mapping types.\n\nCallable types\n These are the types to which the function call operation (see\n section *Calls*) can be applied:\n\n User-defined functions\n A user-defined function object is created by a function\n definition (see section *Function definitions*). It should be\n called with an argument list containing the same number of items\n as the function\'s formal parameter list.\n\n Special attributes:\n\n +-------------------------+---------------------------------+-------------+\n | Attribute | Meaning | |\n +=========================+=================================+=============+\n | ``func_doc`` | The function\'s documentation | Writable |\n | | string, or ``None`` if | |\n | | unavailable | |\n +-------------------------+---------------------------------+-------------+\n | ``__doc__`` | Another way of spelling | Writable |\n | | ``func_doc`` | |\n +-------------------------+---------------------------------+-------------+\n | ``func_name`` | The function\'s name | Writable |\n +-------------------------+---------------------------------+-------------+\n | ``__name__`` | Another way of spelling | Writable |\n | | ``func_name`` | |\n +-------------------------+---------------------------------+-------------+\n | ``__module__`` | The name of the module the | Writable |\n | | function was defined in, or | |\n | | ``None`` if unavailable. | |\n +-------------------------+---------------------------------+-------------+\n | ``func_defaults`` | A tuple containing default | Writable |\n | | argument values for those | |\n | | arguments that have defaults, | |\n | | or ``None`` if no arguments | |\n | | have a default value | |\n +-------------------------+---------------------------------+-------------+\n | ``func_code`` | The code object representing | Writable |\n | | the compiled function body. | |\n +-------------------------+---------------------------------+-------------+\n | ``func_globals`` | A reference to the dictionary | Read-only |\n | | that holds the function\'s | |\n | | global variables --- the global | |\n | | namespace of the module in | |\n | | which the function was defined. | |\n +-------------------------+---------------------------------+-------------+\n | ``func_dict`` | The namespace supporting | Writable |\n | | arbitrary function attributes. | |\n +-------------------------+---------------------------------+-------------+\n | ``func_closure`` | ``None`` or a tuple of cells | Read-only |\n | | that contain bindings for the | |\n | | function\'s free variables. 
| |\n +-------------------------+---------------------------------+-------------+\n\n Most of the attributes labelled "Writable" check the type of the\n assigned value.\n\n Changed in version 2.4: ``func_name`` is now writable.\n\n Function objects also support getting and setting arbitrary\n attributes, which can be used, for example, to attach metadata\n to functions. Regular attribute dot-notation is used to get and\n set such attributes. *Note that the current implementation only\n supports function attributes on user-defined functions. Function\n attributes on built-in functions may be supported in the\n future.*\n\n Additional information about a function\'s definition can be\n retrieved from its code object; see the description of internal\n types below.\n\n User-defined methods\n A user-defined method object combines a class, a class instance\n (or ``None``) and any callable object (normally a user-defined\n function).\n\n Special read-only attributes: ``im_self`` is the class instance\n object, ``im_func`` is the function object; ``im_class`` is the\n class of ``im_self`` for bound methods or the class that asked\n for the method for unbound methods; ``__doc__`` is the method\'s\n documentation (same as ``im_func.__doc__``); ``__name__`` is the\n method name (same as ``im_func.__name__``); ``__module__`` is\n the name of the module the method was defined in, or ``None`` if\n unavailable.\n\n Changed in version 2.2: ``im_self`` used to refer to the class\n that defined the method.\n\n Changed in version 2.6: For Python 3 forward-compatibility,\n ``im_func`` is also available as ``__func__``, and ``im_self``\n as ``__self__``.\n\n Methods also support accessing (but not setting) the arbitrary\n function attributes on the underlying function object.\n\n User-defined method objects may be created when getting an\n attribute of a class (perhaps via an instance of that class), if\n that attribute is a user-defined function object, an unbound\n user-defined method object, or a class method object. When the\n attribute is a user-defined method object, a new method object\n is only created if the class from which it is being retrieved is\n the same as, or a derived class of, the class stored in the\n original method object; otherwise, the original method object is\n used as it is.\n\n When a user-defined method object is created by retrieving a\n user-defined function object from a class, its ``im_self``\n attribute is ``None`` and the method object is said to be\n unbound. When one is created by retrieving a user-defined\n function object from a class via one of its instances, its\n ``im_self`` attribute is the instance, and the method object is\n said to be bound. 
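An assumed interpreter sketch of the bound/unbound distinction (the class ``C`` is purely illustrative):

   >>> class C(object):
   ...     def f(self):
   ...         return 'hello'
   ...
   >>> C.f                            # unbound: im_self is None
   <unbound method C.f>
   >>> c = C()
   >>> c.f.im_self is c               # bound: im_self is the instance
   True
   >>> c.f.im_func is C.__dict__['f']
   True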
In either case, the new method\'s ``im_class``\n attribute is the class from which the retrieval takes place, and\n its ``im_func`` attribute is the original function object.\n\n When a user-defined method object is created by retrieving\n another method object from a class or instance, the behaviour is\n the same as for a function object, except that the ``im_func``\n attribute of the new instance is not the original method object\n but its ``im_func`` attribute.\n\n When a user-defined method object is created by retrieving a\n class method object from a class or instance, its ``im_self``\n attribute is the class itself (the same as the ``im_class``\n attribute), and its ``im_func`` attribute is the function object\n underlying the class method.\n\n When an unbound user-defined method object is called, the\n underlying function (``im_func``) is called, with the\n restriction that the first argument must be an instance of the\n proper class (``im_class``) or of a derived class thereof.\n\n When a bound user-defined method object is called, the\n underlying function (``im_func``) is called, inserting the class\n instance (``im_self``) in front of the argument list. For\n instance, when ``C`` is a class which contains a definition for\n a function ``f()``, and ``x`` is an instance of ``C``, calling\n ``x.f(1)`` is equivalent to calling ``C.f(x, 1)``.\n\n When a user-defined method object is derived from a class method\n object, the "class instance" stored in ``im_self`` will actually\n be the class itself, so that calling either ``x.f(1)`` or\n ``C.f(1)`` is equivalent to calling ``f(C,1)`` where ``f`` is\n the underlying function.\n\n Note that the transformation from function object to (unbound or\n bound) method object happens each time the attribute is\n retrieved from the class or instance. In some cases, a fruitful\n optimization is to assign the attribute to a local variable and\n call that local variable. Also notice that this transformation\n only happens for user-defined functions; other callable objects\n (and all non-callable objects) are retrieved without\n transformation. It is also important to note that user-defined\n functions which are attributes of a class instance are not\n converted to bound methods; this *only* happens when the\n function is an attribute of the class.\n\n Generator functions\n A function or method which uses the ``yield`` statement (see\n section *The yield statement*) is called a *generator function*.\n Such a function, when called, always returns an iterator object\n which can be used to execute the body of the function: calling\n the iterator\'s ``next()`` method will cause the function to\n execute until it provides a value using the ``yield`` statement.\n When the function executes a ``return`` statement or falls off\n the end, a ``StopIteration`` exception is raised and the\n iterator will have reached the end of the set of values to be\n returned.\n\n Built-in functions\n A built-in function object is a wrapper around a C function.\n Examples of built-in functions are ``len()`` and ``math.sin()``\n (``math`` is a standard built-in module). The number and type of\n the arguments are determined by the C function. 
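A minimal generator-function sketch (assumed session; ``next()`` is the Python 2 iterator method):

   >>> def gen():
   ...     yield 1
   ...     yield 2
   ...
   >>> it = gen()
   >>> it.next()
   1
   >>> it.next()
   2
   >>> it.next()
   Traceback (most recent call last):
     ...
   StopIteration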
Special read-\n only attributes: ``__doc__`` is the function\'s documentation\n string, or ``None`` if unavailable; ``__name__`` is the\n function\'s name; ``__self__`` is set to ``None`` (but see the\n next item); ``__module__`` is the name of the module the\n function was defined in or ``None`` if unavailable.\n\n Built-in methods\n This is really a different disguise of a built-in function, this\n time containing an object passed to the C function as an\n implicit extra argument. An example of a built-in method is\n ``alist.append()``, assuming *alist* is a list object. In this\n case, the special read-only attribute ``__self__`` is set to the\n object denoted by *alist*.\n\n Class Types\n Class types, or "new-style classes," are callable. These\n objects normally act as factories for new instances of\n themselves, but variations are possible for class types that\n override ``__new__()``. The arguments of the call are passed to\n ``__new__()`` and, in the typical case, to ``__init__()`` to\n initialize the new instance.\n\n Classic Classes\n Class objects are described below. When a class object is\n called, a new class instance (also described below) is created\n and returned. This implies a call to the class\'s ``__init__()``\n method if it has one. Any arguments are passed on to the\n ``__init__()`` method. If there is no ``__init__()`` method,\n the class must be called without arguments.\n\n Class instances\n Class instances are described below. Class instances are\n callable only when the class has a ``__call__()`` method;\n ``x(arguments)`` is a shorthand for ``x.__call__(arguments)``.\n\nModules\n Modules are imported by the ``import`` statement (see section *The\n import statement*). A module object has a namespace implemented by\n a dictionary object (this is the dictionary referenced by the\n func_globals attribute of functions defined in the module).\n Attribute references are translated to lookups in this dictionary,\n e.g., ``m.x`` is equivalent to ``m.__dict__["x"]``. A module object\n does not contain the code object used to initialize the module\n (since it isn\'t needed once the initialization is done).\n\n Attribute assignment updates the module\'s namespace dictionary,\n e.g., ``m.x = 1`` is equivalent to ``m.__dict__["x"] = 1``.\n\n Special read-only attribute: ``__dict__`` is the module\'s namespace\n as a dictionary object.\n\n **CPython implementation detail:** Because of the way CPython\n clears module dictionaries, the module dictionary will be cleared\n when the module falls out of scope even if the dictionary still has\n live references. To avoid this, copy the dictionary or keep the\n module around while using its dictionary directly.\n\n Predefined (writable) attributes: ``__name__`` is the module\'s\n name; ``__doc__`` is the module\'s documentation string, or ``None``\n if unavailable; ``__file__`` is the pathname of the file from which\n the module was loaded, if it was loaded from a file. The\n ``__file__`` attribute is not present for C modules that are\n statically linked into the interpreter; for extension modules\n loaded dynamically from a shared library, it is the pathname of the\n shared library file.\n\nClasses\n Both class types (new-style classes) and class objects (old-\n style/classic classes) are typically created by class definitions\n (see section *Class definitions*). A class has a namespace\n implemented by a dictionary object. 
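A quick sketch of module attribute access going through the namespace dictionary, as described above (assumed session using the standard ``math`` module):

   >>> import math
   >>> math.pi is math.__dict__['pi']
   True
   >>> math.__name__
   'math'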
Class attribute references are\n translated to lookups in this dictionary, e.g., ``C.x`` is\n translated to ``C.__dict__["x"]`` (although for new-style classes\n in particular there are a number of hooks which allow for other\n means of locating attributes). When the attribute name is not found\n there, the attribute search continues in the base classes. For\n old-style classes, the search is depth-first, left-to-right in the\n order of occurrence in the base class list. New-style classes use\n the more complex C3 method resolution order which behaves correctly\n even in the presence of \'diamond\' inheritance structures where\n there are multiple inheritance paths leading back to a common\n ancestor. Additional details on the C3 MRO used by new-style\n classes can be found in the documentation accompanying the 2.3\n release at http://www.python.org/download/releases/2.3/mro/.\n\n When a class attribute reference (for class ``C``, say) would yield\n a user-defined function object or an unbound user-defined method\n object whose associated class is either ``C`` or one of its base\n classes, it is transformed into an unbound user-defined method\n object whose ``im_class`` attribute is ``C``. When it would yield a\n class method object, it is transformed into a bound user-defined\n method object whose ``im_class`` and ``im_self`` attributes are\n both ``C``. When it would yield a static method object, it is\n transformed into the object wrapped by the static method object.\n See section *Implementing Descriptors* for another way in which\n attributes retrieved from a class may differ from those actually\n contained in its ``__dict__`` (note that only new-style classes\n support descriptors).\n\n Class attribute assignments update the class\'s dictionary, never\n the dictionary of a base class.\n\n A class object can be called (see above) to yield a class instance\n (see below).\n\n Special attributes: ``__name__`` is the class name; ``__module__``\n is the module name in which the class was defined; ``__dict__`` is\n the dictionary containing the class\'s namespace; ``__bases__`` is a\n tuple (possibly empty or a singleton) containing the base classes,\n in the order of their occurrence in the base class list;\n ``__doc__`` is the class\'s documentation string, or None if\n undefined.\n\nClass instances\n A class instance is created by calling a class object (see above).\n A class instance has a namespace implemented as a dictionary which\n is the first place in which attribute references are searched.\n When an attribute is not found there, and the instance\'s class has\n an attribute by that name, the search continues with the class\n attributes. If a class attribute is found that is a user-defined\n function object or an unbound user-defined method object whose\n associated class is the class (call it ``C``) of the instance for\n which the attribute reference was initiated or one of its bases, it\n is transformed into a bound user-defined method object whose\n ``im_class`` attribute is ``C`` and whose ``im_self`` attribute is\n the instance. Static method and class method objects are also\n transformed, as if they had been retrieved from class ``C``; see\n above under "Classes". See section *Implementing Descriptors* for\n another way in which attributes of a class retrieved via its\n instances may differ from the objects actually stored in the\n class\'s ``__dict__``. 
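An illustrative sketch of class attribute lookup continuing into the base classes (hypothetical new-style classes ``A`` and ``B``; assumed CPython 2.x):

   >>> class A(object):
   ...     x = 1
   ...
   >>> class B(A):
   ...     pass
   ...
   >>> B.x                        # not in B.__dict__, found on the base class
   1
   >>> B.__bases__
   (<class '__main__.A'>,)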
If no class attribute is found, and the\n object\'s class has a ``__getattr__()`` method, that is called to\n satisfy the lookup.\n\n Attribute assignments and deletions update the instance\'s\n dictionary, never a class\'s dictionary. If the class has a\n ``__setattr__()`` or ``__delattr__()`` method, this is called\n instead of updating the instance dictionary directly.\n\n Class instances can pretend to be numbers, sequences, or mappings\n if they have methods with certain special names. See section\n *Special method names*.\n\n Special attributes: ``__dict__`` is the attribute dictionary;\n ``__class__`` is the instance\'s class.\n\nFiles\n A file object represents an open file. File objects are created by\n the ``open()`` built-in function, and also by ``os.popen()``,\n ``os.fdopen()``, and the ``makefile()`` method of socket objects\n (and perhaps by other functions or methods provided by extension\n modules). The objects ``sys.stdin``, ``sys.stdout`` and\n ``sys.stderr`` are initialized to file objects corresponding to the\n interpreter\'s standard input, output and error streams. See *File\n Objects* for complete documentation of file objects.\n\nInternal types\n A few types used internally by the interpreter are exposed to the\n user. Their definitions may change with future versions of the\n interpreter, but they are mentioned here for completeness.\n\n Code objects\n Code objects represent *byte-compiled* executable Python code,\n or *bytecode*. The difference between a code object and a\n function object is that the function object contains an explicit\n reference to the function\'s globals (the module in which it was\n defined), while a code object contains no context; also the\n default argument values are stored in the function object, not\n in the code object (because they represent values calculated at\n run-time). 
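A small assumed sketch of the split just described between a function object and its code object: the default values and the reference to the globals live on the function object (hypothetical function ``f``):

   >>> def f(a, b=2):
   ...     return a + b
   ...
   >>> f.func_defaults            # default values live on the function object
   (2,)
   >>> f.func_globals is globals()
   True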
Unlike function objects, code objects are immutable\n and contain no references (directly or indirectly) to mutable\n objects.\n\n Special read-only attributes: ``co_name`` gives the function\n name; ``co_argcount`` is the number of positional arguments\n (including arguments with default values); ``co_nlocals`` is the\n number of local variables used by the function (including\n arguments); ``co_varnames`` is a tuple containing the names of\n the local variables (starting with the argument names);\n ``co_cellvars`` is a tuple containing the names of local\n variables that are referenced by nested functions;\n ``co_freevars`` is a tuple containing the names of free\n variables; ``co_code`` is a string representing the sequence of\n bytecode instructions; ``co_consts`` is a tuple containing the\n literals used by the bytecode; ``co_names`` is a tuple\n containing the names used by the bytecode; ``co_filename`` is\n the filename from which the code was compiled;\n ``co_firstlineno`` is the first line number of the function;\n ``co_lnotab`` is a string encoding the mapping from bytecode\n offsets to line numbers (for details see the source code of the\n interpreter); ``co_stacksize`` is the required stack size\n (including local variables); ``co_flags`` is an integer encoding\n a number of flags for the interpreter.\n\n The following flag bits are defined for ``co_flags``: bit\n ``0x04`` is set if the function uses the ``*arguments`` syntax\n to accept an arbitrary number of positional arguments; bit\n ``0x08`` is set if the function uses the ``**keywords`` syntax\n to accept arbitrary keyword arguments; bit ``0x20`` is set if\n the function is a generator.\n\n Future feature declarations (``from __future__ import\n division``) also use bits in ``co_flags`` to indicate whether a\n code object was compiled with a particular feature enabled: bit\n ``0x2000`` is set if the function was compiled with future\n division enabled; bits ``0x10`` and ``0x1000`` were used in\n earlier versions of Python.\n\n Other bits in ``co_flags`` are reserved for internal use.\n\n If a code object represents a function, the first item in\n ``co_consts`` is the documentation string of the function, or\n ``None`` if undefined.\n\n Frame objects\n Frame objects represent execution frames. They may occur in\n traceback objects (see below).\n\n Special read-only attributes: ``f_back`` is to the previous\n stack frame (towards the caller), or ``None`` if this is the\n bottom stack frame; ``f_code`` is the code object being executed\n in this frame; ``f_locals`` is the dictionary used to look up\n local variables; ``f_globals`` is used for global variables;\n ``f_builtins`` is used for built-in (intrinsic) names;\n ``f_restricted`` is a flag indicating whether the function is\n executing in restricted execution mode; ``f_lasti`` gives the\n precise instruction (this is an index into the bytecode string\n of the code object).\n\n Special writable attributes: ``f_trace``, if not ``None``, is a\n function called at the start of each source code line (this is\n used by the debugger); ``f_exc_type``, ``f_exc_value``,\n ``f_exc_traceback`` represent the last exception raised in the\n parent frame provided another exception was ever raised in the\n current frame (in all other cases they are None); ``f_lineno``\n is the current line number of the frame --- writing to this from\n within a trace function jumps to the given line (only for the\n bottom-most frame). 
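An assumed sketch of the code-object attributes and ``co_flags`` bits described above (hypothetical function ``f``; Python 2 spelling ``func_code``):

   >>> def f(a, b=2, *args, **kwargs):
   ...     return a + b
   ...
   >>> f.func_code.co_name, f.func_code.co_argcount
   ('f', 2)
   >>> bool(f.func_code.co_flags & 0x04)   # *arguments syntax used
   True
   >>> bool(f.func_code.co_flags & 0x08)   # **keywords syntax used
   True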
A debugger can implement a Jump command\n (aka Set Next Statement) by writing to f_lineno.\n\n Traceback objects\n Traceback objects represent a stack trace of an exception. A\n traceback object is created when an exception occurs. When the\n search for an exception handler unwinds the execution stack, at\n each unwound level a traceback object is inserted in front of\n the current traceback. When an exception handler is entered,\n the stack trace is made available to the program. (See section\n *The try statement*.) It is accessible as ``sys.exc_traceback``,\n and also as the third item of the tuple returned by\n ``sys.exc_info()``. The latter is the preferred interface,\n since it works correctly when the program is using multiple\n threads. When the program contains no suitable handler, the\n stack trace is written (nicely formatted) to the standard error\n stream; if the interpreter is interactive, it is also made\n available to the user as ``sys.last_traceback``.\n\n Special read-only attributes: ``tb_next`` is the next level in\n the stack trace (towards the frame where the exception\n occurred), or ``None`` if there is no next level; ``tb_frame``\n points to the execution frame of the current level;\n ``tb_lineno`` gives the line number where the exception\n occurred; ``tb_lasti`` indicates the precise instruction. The\n line number and last instruction in the traceback may differ\n from the line number of its frame object if the exception\n occurred in a ``try`` statement with no matching except clause\n or with a finally clause.\n\n Slice objects\n Slice objects are used to represent slices when *extended slice\n syntax* is used. This is a slice using two colons, or multiple\n slices or ellipses separated by commas, e.g., ``a[i:j:step]``,\n ``a[i:j, k:l]``, or ``a[..., i:j]``. They are also created by\n the built-in ``slice()`` function.\n\n Special read-only attributes: ``start`` is the lower bound;\n ``stop`` is the upper bound; ``step`` is the step value; each is\n ``None`` if omitted. These attributes can have any type.\n\n Slice objects support one method:\n\n slice.indices(self, length)\n\n This method takes a single integer argument *length* and\n computes information about the extended slice that the slice\n object would describe if applied to a sequence of *length*\n items. It returns a tuple of three integers; respectively\n these are the *start* and *stop* indices and the *step* or\n stride length of the slice. Missing or out-of-bounds indices\n are handled in a manner consistent with regular slices.\n\n New in version 2.3.\n\n Static method objects\n Static method objects provide a way of defeating the\n transformation of function objects to method objects described\n above. A static method object is a wrapper around any other\n object, usually a user-defined method object. When a static\n method object is retrieved from a class or a class instance, the\n object actually returned is the wrapped object, which is not\n subject to any further transformation. Static method objects are\n not themselves callable, although the objects they wrap usually\n are. Static method objects are created by the built-in\n ``staticmethod()`` constructor.\n\n Class method objects\n A class method object, like a static method object, is a wrapper\n around another object that alters the way in which that object\n is retrieved from classes and class instances. The behaviour of\n class method objects upon such retrieval is described above,\n under "User-defined methods". 
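A short sketch of ``slice.indices()`` as described above (assumed session):

   >>> slice(None, None, 2).indices(5)    # start, stop, step for a 5-item sequence
   (0, 5, 2)
   >>> slice(-1, None).indices(5)
   (4, 5, 1)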
Class method objects are created\n by the built-in ``classmethod()`` constructor.\n', + 'types': '\nThe standard type hierarchy\n***************************\n\nBelow is a list of the types that are built into Python. Extension\nmodules (written in C, Java, or other languages, depending on the\nimplementation) can define additional types. Future versions of\nPython may add types to the type hierarchy (e.g., rational numbers,\nefficiently stored arrays of integers, etc.).\n\nSome of the type descriptions below contain a paragraph listing\n\'special attributes.\' These are attributes that provide access to the\nimplementation and are not intended for general use. Their definition\nmay change in the future.\n\nNone\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name ``None``.\n It is used to signify the absence of a value in many situations,\n e.g., it is returned from functions that don\'t explicitly return\n anything. Its truth value is false.\n\nNotImplemented\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name\n ``NotImplemented``. Numeric methods and rich comparison methods may\n return this value if they do not implement the operation for the\n operands provided. (The interpreter will then try the reflected\n operation, or some other fallback, depending on the operator.) Its\n truth value is true.\n\nEllipsis\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name\n ``Ellipsis``. It is used to indicate the presence of the ``...``\n syntax in a slice. Its truth value is true.\n\n``numbers.Number``\n These are created by numeric literals and returned as results by\n arithmetic operators and arithmetic built-in functions. Numeric\n objects are immutable; once created their value never changes.\n Python numbers are of course strongly related to mathematical\n numbers, but subject to the limitations of numerical representation\n in computers.\n\n Python distinguishes between integers, floating point numbers, and\n complex numbers:\n\n ``numbers.Integral``\n These represent elements from the mathematical set of integers\n (positive and negative).\n\n There are three types of integers:\n\n Plain integers\n These represent numbers in the range -2147483648 through\n 2147483647. (The range may be larger on machines with a\n larger natural word size, but not smaller.) When the result\n of an operation would fall outside this range, the result is\n normally returned as a long integer (in some cases, the\n exception ``OverflowError`` is raised instead). For the\n purpose of shift and mask operations, integers are assumed to\n have a binary, 2\'s complement notation using 32 or more bits,\n and hiding no bits from the user (i.e., all 4294967296\n different bit patterns correspond to different values).\n\n Long integers\n These represent numbers in an unlimited range, subject to\n available (virtual) memory only. For the purpose of shift\n and mask operations, a binary representation is assumed, and\n negative numbers are represented in a variant of 2\'s\n complement which gives the illusion of an infinite string of\n sign bits extending to the left.\n\n Booleans\n These represent the truth values False and True. The two\n objects representing the values False and True are the only\n Boolean objects. 
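A minimal sketch of the two wrapper types just described, created with the built-in constructors (hypothetical class ``C``, assumed session):

   >>> class C(object):
   ...     @staticmethod
   ...     def s():
   ...         return 'static'
   ...     @classmethod
   ...     def c(cls):
   ...         return cls.__name__
   ...
   >>> C.s()                      # wrapped function returned untransformed
   'static'
   >>> C().c()                    # the class is passed as the implicit first argument
   'C'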
The Boolean type is a subtype of plain\n integers, and Boolean values behave like the values 0 and 1,\n respectively, in almost all contexts, the exception being\n that when converted to a string, the strings ``"False"`` or\n ``"True"`` are returned, respectively.\n\n The rules for integer representation are intended to give the\n most meaningful interpretation of shift and mask operations\n involving negative integers and the least surprises when\n switching between the plain and long integer domains. Any\n operation, if it yields a result in the plain integer domain,\n will yield the same result in the long integer domain or when\n using mixed operands. The switch between domains is transparent\n to the programmer.\n\n ``numbers.Real`` (``float``)\n These represent machine-level double precision floating point\n numbers. You are at the mercy of the underlying machine\n architecture (and C or Java implementation) for the accepted\n range and handling of overflow. Python does not support single-\n precision floating point numbers; the savings in processor and\n memory usage that are usually the reason for using these is\n dwarfed by the overhead of using objects in Python, so there is\n no reason to complicate the language with two kinds of floating\n point numbers.\n\n ``numbers.Complex``\n These represent complex numbers as a pair of machine-level\n double precision floating point numbers. The same caveats apply\n as for floating point numbers. The real and imaginary parts of a\n complex number ``z`` can be retrieved through the read-only\n attributes ``z.real`` and ``z.imag``.\n\nSequences\n These represent finite ordered sets indexed by non-negative\n numbers. The built-in function ``len()`` returns the number of\n items of a sequence. When the length of a sequence is *n*, the\n index set contains the numbers 0, 1, ..., *n*-1. Item *i* of\n sequence *a* is selected by ``a[i]``.\n\n Sequences also support slicing: ``a[i:j]`` selects all items with\n index *k* such that *i* ``<=`` *k* ``<`` *j*. When used as an\n expression, a slice is a sequence of the same type. This implies\n that the index set is renumbered so that it starts at 0.\n\n Some sequences also support "extended slicing" with a third "step"\n parameter: ``a[i:j:k]`` selects all items of *a* with index *x*\n where ``x = i + n*k``, *n* ``>=`` ``0`` and *i* ``<=`` *x* ``<``\n *j*.\n\n Sequences are distinguished according to their mutability:\n\n Immutable sequences\n An object of an immutable sequence type cannot change once it is\n created. (If the object contains references to other objects,\n these other objects may be mutable and may be changed; however,\n the collection of objects directly referenced by an immutable\n object cannot change.)\n\n The following types are immutable sequences:\n\n Strings\n The items of a string are characters. There is no separate\n character type; a character is represented by a string of one\n item. Characters represent (at least) 8-bit bytes. The\n built-in functions ``chr()`` and ``ord()`` convert between\n characters and nonnegative integers representing the byte\n values. Bytes with the values 0-127 usually represent the\n corresponding ASCII values, but the interpretation of values\n is up to the program. 
The string data type is also used to\n represent arrays of bytes, e.g., to hold data read from a\n file.\n\n (On systems whose native character set is not ASCII, strings\n may use EBCDIC in their internal representation, provided the\n functions ``chr()`` and ``ord()`` implement a mapping between\n ASCII and EBCDIC, and string comparison preserves the ASCII\n order. Or perhaps someone can propose a better rule?)\n\n Unicode\n The items of a Unicode object are Unicode code units. A\n Unicode code unit is represented by a Unicode object of one\n item and can hold either a 16-bit or 32-bit value\n representing a Unicode ordinal (the maximum value for the\n ordinal is given in ``sys.maxunicode``, and depends on how\n Python is configured at compile time). Surrogate pairs may\n be present in the Unicode object, and will be reported as two\n separate items. The built-in functions ``unichr()`` and\n ``ord()`` convert between code units and nonnegative integers\n representing the Unicode ordinals as defined in the Unicode\n Standard 3.0. Conversion from and to other encodings are\n possible through the Unicode method ``encode()`` and the\n built-in function ``unicode()``.\n\n Tuples\n The items of a tuple are arbitrary Python objects. Tuples of\n two or more items are formed by comma-separated lists of\n expressions. A tuple of one item (a \'singleton\') can be\n formed by affixing a comma to an expression (an expression by\n itself does not create a tuple, since parentheses must be\n usable for grouping of expressions). An empty tuple can be\n formed by an empty pair of parentheses.\n\n Mutable sequences\n Mutable sequences can be changed after they are created. The\n subscription and slicing notations can be used as the target of\n assignment and ``del`` (delete) statements.\n\n There are currently two intrinsic mutable sequence types:\n\n Lists\n The items of a list are arbitrary Python objects. Lists are\n formed by placing a comma-separated list of expressions in\n square brackets. (Note that there are no special cases needed\n to form lists of length 0 or 1.)\n\n Byte Arrays\n A bytearray object is a mutable array. They are created by\n the built-in ``bytearray()`` constructor. Aside from being\n mutable (and hence unhashable), byte arrays otherwise provide\n the same interface and functionality as immutable bytes\n objects.\n\n The extension module ``array`` provides an additional example of\n a mutable sequence type.\n\nSet types\n These represent unordered, finite sets of unique, immutable\n objects. As such, they cannot be indexed by any subscript. However,\n they can be iterated over, and the built-in function ``len()``\n returns the number of items in a set. Common uses for sets are fast\n membership testing, removing duplicates from a sequence, and\n computing mathematical operations such as intersection, union,\n difference, and symmetric difference.\n\n For set elements, the same immutability rules apply as for\n dictionary keys. Note that numeric types obey the normal rules for\n numeric comparison: if two numbers compare equal (e.g., ``1`` and\n ``1.0``), only one of them can be contained in a set.\n\n There are currently two intrinsic set types:\n\n Sets\n These represent a mutable set. They are created by the built-in\n ``set()`` constructor and can be modified afterwards by several\n methods, such as ``add()``.\n\n Frozen sets\n These represent an immutable set. They are created by the\n built-in ``frozenset()`` constructor. 
As a frozenset is\n immutable and *hashable*, it can be used again as an element of\n another set, or as a dictionary key.\n\nMappings\n These represent finite sets of objects indexed by arbitrary index\n sets. The subscript notation ``a[k]`` selects the item indexed by\n ``k`` from the mapping ``a``; this can be used in expressions and\n as the target of assignments or ``del`` statements. The built-in\n function ``len()`` returns the number of items in a mapping.\n\n There is currently a single intrinsic mapping type:\n\n Dictionaries\n These represent finite sets of objects indexed by nearly\n arbitrary values. The only types of values not acceptable as\n keys are values containing lists or dictionaries or other\n mutable types that are compared by value rather than by object\n identity, the reason being that the efficient implementation of\n dictionaries requires a key\'s hash value to remain constant.\n Numeric types used for keys obey the normal rules for numeric\n comparison: if two numbers compare equal (e.g., ``1`` and\n ``1.0``) then they can be used interchangeably to index the same\n dictionary entry.\n\n Dictionaries are mutable; they can be created by the ``{...}``\n notation (see section *Dictionary displays*).\n\n The extension modules ``dbm``, ``gdbm``, and ``bsddb`` provide\n additional examples of mapping types.\n\nCallable types\n These are the types to which the function call operation (see\n section *Calls*) can be applied:\n\n User-defined functions\n A user-defined function object is created by a function\n definition (see section *Function definitions*). It should be\n called with an argument list containing the same number of items\n as the function\'s formal parameter list.\n\n Special attributes:\n\n +-------------------------+---------------------------------+-------------+\n | Attribute | Meaning | |\n +=========================+=================================+=============+\n | ``func_doc`` | The function\'s documentation | Writable |\n | | string, or ``None`` if | |\n | | unavailable | |\n +-------------------------+---------------------------------+-------------+\n | ``__doc__`` | Another way of spelling | Writable |\n | | ``func_doc`` | |\n +-------------------------+---------------------------------+-------------+\n | ``func_name`` | The function\'s name | Writable |\n +-------------------------+---------------------------------+-------------+\n | ``__name__`` | Another way of spelling | Writable |\n | | ``func_name`` | |\n +-------------------------+---------------------------------+-------------+\n | ``__module__`` | The name of the module the | Writable |\n | | function was defined in, or | |\n | | ``None`` if unavailable. | |\n +-------------------------+---------------------------------+-------------+\n | ``func_defaults`` | A tuple containing default | Writable |\n | | argument values for those | |\n | | arguments that have defaults, | |\n | | or ``None`` if no arguments | |\n | | have a default value | |\n +-------------------------+---------------------------------+-------------+\n | ``func_code`` | The code object representing | Writable |\n | | the compiled function body. | |\n +-------------------------+---------------------------------+-------------+\n | ``func_globals`` | A reference to the dictionary | Read-only |\n | | that holds the function\'s | |\n | | global variables --- the global | |\n | | namespace of the module in | |\n | | which the function was defined. 
| |\n +-------------------------+---------------------------------+-------------+\n | ``func_dict`` | The namespace supporting | Writable |\n | | arbitrary function attributes. | |\n +-------------------------+---------------------------------+-------------+\n | ``func_closure`` | ``None`` or a tuple of cells | Read-only |\n | | that contain bindings for the | |\n | | function\'s free variables. | |\n +-------------------------+---------------------------------+-------------+\n\n Most of the attributes labelled "Writable" check the type of the\n assigned value.\n\n Changed in version 2.4: ``func_name`` is now writable.\n\n Function objects also support getting and setting arbitrary\n attributes, which can be used, for example, to attach metadata\n to functions. Regular attribute dot-notation is used to get and\n set such attributes. *Note that the current implementation only\n supports function attributes on user-defined functions. Function\n attributes on built-in functions may be supported in the\n future.*\n\n Additional information about a function\'s definition can be\n retrieved from its code object; see the description of internal\n types below.\n\n User-defined methods\n A user-defined method object combines a class, a class instance\n (or ``None``) and any callable object (normally a user-defined\n function).\n\n Special read-only attributes: ``im_self`` is the class instance\n object, ``im_func`` is the function object; ``im_class`` is the\n class of ``im_self`` for bound methods or the class that asked\n for the method for unbound methods; ``__doc__`` is the method\'s\n documentation (same as ``im_func.__doc__``); ``__name__`` is the\n method name (same as ``im_func.__name__``); ``__module__`` is\n the name of the module the method was defined in, or ``None`` if\n unavailable.\n\n Changed in version 2.2: ``im_self`` used to refer to the class\n that defined the method.\n\n Changed in version 2.6: For Python 3 forward-compatibility,\n ``im_func`` is also available as ``__func__``, and ``im_self``\n as ``__self__``.\n\n Methods also support accessing (but not setting) the arbitrary\n function attributes on the underlying function object.\n\n User-defined method objects may be created when getting an\n attribute of a class (perhaps via an instance of that class), if\n that attribute is a user-defined function object, an unbound\n user-defined method object, or a class method object. When the\n attribute is a user-defined method object, a new method object\n is only created if the class from which it is being retrieved is\n the same as, or a derived class of, the class stored in the\n original method object; otherwise, the original method object is\n used as it is.\n\n When a user-defined method object is created by retrieving a\n user-defined function object from a class, its ``im_self``\n attribute is ``None`` and the method object is said to be\n unbound. When one is created by retrieving a user-defined\n function object from a class via one of its instances, its\n ``im_self`` attribute is the instance, and the method object is\n said to be bound. 
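As a rough illustration of the function attributes and of the bound/unbound distinction described above (Python 2 semantics; ``greet`` and ``C`` are invented names):

    >>> def greet(who):
    ...     """Say hello."""
    ...     return 'hello ' + who
    ...
    >>> greet.func_name, greet.__doc__
    ('greet', 'Say hello.')
    >>> greet.author = 'anonymous'   # an arbitrary function attribute
    >>> class C(object):
    ...     f = greet
    ...
    >>> C.f.im_self is None          # retrieved from the class: unbound
    True
    >>> c = C()
    >>> c.f.im_self is c             # retrieved via an instance: bound
    True
    >>> c.f.im_func is greet
    True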
In either case, the new method\'s ``im_class``\n attribute is the class from which the retrieval takes place, and\n its ``im_func`` attribute is the original function object.\n\n When a user-defined method object is created by retrieving\n another method object from a class or instance, the behaviour is\n the same as for a function object, except that the ``im_func``\n attribute of the new instance is not the original method object\n but its ``im_func`` attribute.\n\n When a user-defined method object is created by retrieving a\n class method object from a class or instance, its ``im_self``\n attribute is the class itself, and its ``im_func`` attribute is\n the function object underlying the class method.\n\n When an unbound user-defined method object is called, the\n underlying function (``im_func``) is called, with the\n restriction that the first argument must be an instance of the\n proper class (``im_class``) or of a derived class thereof.\n\n When a bound user-defined method object is called, the\n underlying function (``im_func``) is called, inserting the class\n instance (``im_self``) in front of the argument list. For\n instance, when ``C`` is a class which contains a definition for\n a function ``f()``, and ``x`` is an instance of ``C``, calling\n ``x.f(1)`` is equivalent to calling ``C.f(x, 1)``.\n\n When a user-defined method object is derived from a class method\n object, the "class instance" stored in ``im_self`` will actually\n be the class itself, so that calling either ``x.f(1)`` or\n ``C.f(1)`` is equivalent to calling ``f(C,1)`` where ``f`` is\n the underlying function.\n\n Note that the transformation from function object to (unbound or\n bound) method object happens each time the attribute is\n retrieved from the class or instance. In some cases, a fruitful\n optimization is to assign the attribute to a local variable and\n call that local variable. Also notice that this transformation\n only happens for user-defined functions; other callable objects\n (and all non-callable objects) are retrieved without\n transformation. It is also important to note that user-defined\n functions which are attributes of a class instance are not\n converted to bound methods; this *only* happens when the\n function is an attribute of the class.\n\n Generator functions\n A function or method which uses the ``yield`` statement (see\n section *The yield statement*) is called a *generator function*.\n Such a function, when called, always returns an iterator object\n which can be used to execute the body of the function: calling\n the iterator\'s ``next()`` method will cause the function to\n execute until it provides a value using the ``yield`` statement.\n When the function executes a ``return`` statement or falls off\n the end, a ``StopIteration`` exception is raised and the\n iterator will have reached the end of the set of values to be\n returned.\n\n Built-in functions\n A built-in function object is a wrapper around a C function.\n Examples of built-in functions are ``len()`` and ``math.sin()``\n (``math`` is a standard built-in module). The number and type of\n the arguments are determined by the C function. 
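The generator-function paragraph above can be made concrete with a minimal sketch (Python 2 spelling of the iterator protocol, i.e. ``next()``; ``countdown`` is an invented example):

    >>> def countdown(n):
    ...     while n > 0:
    ...         yield n
    ...         n -= 1
    ...
    >>> it = countdown(3)
    >>> it.next(), it.next(), it.next()
    (3, 2, 1)
    >>> it.next()
    Traceback (most recent call last):
      ...
    StopIteration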
Special read-\n only attributes: ``__doc__`` is the function\'s documentation\n string, or ``None`` if unavailable; ``__name__`` is the\n function\'s name; ``__self__`` is set to ``None`` (but see the\n next item); ``__module__`` is the name of the module the\n function was defined in or ``None`` if unavailable.\n\n Built-in methods\n This is really a different disguise of a built-in function, this\n time containing an object passed to the C function as an\n implicit extra argument. An example of a built-in method is\n ``alist.append()``, assuming *alist* is a list object. In this\n case, the special read-only attribute ``__self__`` is set to the\n object denoted by *alist*.\n\n Class Types\n Class types, or "new-style classes," are callable. These\n objects normally act as factories for new instances of\n themselves, but variations are possible for class types that\n override ``__new__()``. The arguments of the call are passed to\n ``__new__()`` and, in the typical case, to ``__init__()`` to\n initialize the new instance.\n\n Classic Classes\n Class objects are described below. When a class object is\n called, a new class instance (also described below) is created\n and returned. This implies a call to the class\'s ``__init__()``\n method if it has one. Any arguments are passed on to the\n ``__init__()`` method. If there is no ``__init__()`` method,\n the class must be called without arguments.\n\n Class instances\n Class instances are described below. Class instances are\n callable only when the class has a ``__call__()`` method;\n ``x(arguments)`` is a shorthand for ``x.__call__(arguments)``.\n\nModules\n Modules are imported by the ``import`` statement (see section *The\n import statement*). A module object has a namespace implemented by\n a dictionary object (this is the dictionary referenced by the\n func_globals attribute of functions defined in the module).\n Attribute references are translated to lookups in this dictionary,\n e.g., ``m.x`` is equivalent to ``m.__dict__["x"]``. A module object\n does not contain the code object used to initialize the module\n (since it isn\'t needed once the initialization is done).\n\n Attribute assignment updates the module\'s namespace dictionary,\n e.g., ``m.x = 1`` is equivalent to ``m.__dict__["x"] = 1``.\n\n Special read-only attribute: ``__dict__`` is the module\'s namespace\n as a dictionary object.\n\n **CPython implementation detail:** Because of the way CPython\n clears module dictionaries, the module dictionary will be cleared\n when the module falls out of scope even if the dictionary still has\n live references. To avoid this, copy the dictionary or keep the\n module around while using its dictionary directly.\n\n Predefined (writable) attributes: ``__name__`` is the module\'s\n name; ``__doc__`` is the module\'s documentation string, or ``None``\n if unavailable; ``__file__`` is the pathname of the file from which\n the module was loaded, if it was loaded from a file. The\n ``__file__`` attribute is not present for C modules that are\n statically linked into the interpreter; for extension modules\n loaded dynamically from a shared library, it is the pathname of the\n shared library file.\n\nClasses\n Both class types (new-style classes) and class objects (old-\n style/classic classes) are typically created by class definitions\n (see section *Class definitions*). A class has a namespace\n implemented by a dictionary object. 
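Two of the points above, instances made callable through ``__call__()`` and module attribute access as a ``__dict__`` lookup, can be sketched as follows (``Adder`` is an invented class; ``math`` is used only as a convenient stock module):

    >>> class Adder(object):
    ...     def __init__(self, n):
    ...         self.n = n
    ...     def __call__(self, x):         # makes instances callable
    ...         return self.n + x
    ...
    >>> add3 = Adder(3)
    >>> add3(4)                            # shorthand for add3.__call__(4)
    7
    >>> import math
    >>> math.pi == math.__dict__['pi']     # attribute access is a namespace lookup
    True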
Class attribute references are\n translated to lookups in this dictionary, e.g., ``C.x`` is\n translated to ``C.__dict__["x"]`` (although for new-style classes\n in particular there are a number of hooks which allow for other\n means of locating attributes). When the attribute name is not found\n there, the attribute search continues in the base classes. For\n old-style classes, the search is depth-first, left-to-right in the\n order of occurrence in the base class list. New-style classes use\n the more complex C3 method resolution order which behaves correctly\n even in the presence of \'diamond\' inheritance structures where\n there are multiple inheritance paths leading back to a common\n ancestor. Additional details on the C3 MRO used by new-style\n classes can be found in the documentation accompanying the 2.3\n release at http://www.python.org/download/releases/2.3/mro/.\n\n When a class attribute reference (for class ``C``, say) would yield\n a user-defined function object or an unbound user-defined method\n object whose associated class is either ``C`` or one of its base\n classes, it is transformed into an unbound user-defined method\n object whose ``im_class`` attribute is ``C``. When it would yield a\n class method object, it is transformed into a bound user-defined\n method object whose ``im_self`` attribute is ``C``. When it would\n yield a static method object, it is transformed into the object\n wrapped by the static method object. See section *Implementing\n Descriptors* for another way in which attributes retrieved from a\n class may differ from those actually contained in its ``__dict__``\n (note that only new-style classes support descriptors).\n\n Class attribute assignments update the class\'s dictionary, never\n the dictionary of a base class.\n\n A class object can be called (see above) to yield a class instance\n (see below).\n\n Special attributes: ``__name__`` is the class name; ``__module__``\n is the module name in which the class was defined; ``__dict__`` is\n the dictionary containing the class\'s namespace; ``__bases__`` is a\n tuple (possibly empty or a singleton) containing the base classes,\n in the order of their occurrence in the base class list;\n ``__doc__`` is the class\'s documentation string, or None if\n undefined.\n\nClass instances\n A class instance is created by calling a class object (see above).\n A class instance has a namespace implemented as a dictionary which\n is the first place in which attribute references are searched.\n When an attribute is not found there, and the instance\'s class has\n an attribute by that name, the search continues with the class\n attributes. If a class attribute is found that is a user-defined\n function object or an unbound user-defined method object whose\n associated class is the class (call it ``C``) of the instance for\n which the attribute reference was initiated or one of its bases, it\n is transformed into a bound user-defined method object whose\n ``im_class`` attribute is ``C`` and whose ``im_self`` attribute is\n the instance. Static method and class method objects are also\n transformed, as if they had been retrieved from class ``C``; see\n above under "Classes". See section *Implementing Descriptors* for\n another way in which attributes of a class retrieved via its\n instances may differ from the objects actually stored in the\n class\'s ``__dict__``. 
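A small sketch of the new-style lookup order and the special class attributes described above, using an invented 'diamond' hierarchy; the read-only ``__mro__`` attribute is not listed in this excerpt, but it is how new-style classes expose the C3 order:

    >>> class A(object): pass
    ...
    >>> class B(A): pass
    ...
    >>> class C(A): pass
    ...
    >>> class D(B, C): pass
    ...
    >>> [cls.__name__ for cls in D.__mro__]   # C3 linearization of the diamond
    ['D', 'B', 'C', 'A', 'object']
    >>> D.__bases__ == (B, C)
    True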
If no class attribute is found, and the\n object\'s class has a ``__getattr__()`` method, that is called to\n satisfy the lookup.\n\n Attribute assignments and deletions update the instance\'s\n dictionary, never a class\'s dictionary. If the class has a\n ``__setattr__()`` or ``__delattr__()`` method, this is called\n instead of updating the instance dictionary directly.\n\n Class instances can pretend to be numbers, sequences, or mappings\n if they have methods with certain special names. See section\n *Special method names*.\n\n Special attributes: ``__dict__`` is the attribute dictionary;\n ``__class__`` is the instance\'s class.\n\nFiles\n A file object represents an open file. File objects are created by\n the ``open()`` built-in function, and also by ``os.popen()``,\n ``os.fdopen()``, and the ``makefile()`` method of socket objects\n (and perhaps by other functions or methods provided by extension\n modules). The objects ``sys.stdin``, ``sys.stdout`` and\n ``sys.stderr`` are initialized to file objects corresponding to the\n interpreter\'s standard input, output and error streams. See *File\n Objects* for complete documentation of file objects.\n\nInternal types\n A few types used internally by the interpreter are exposed to the\n user. Their definitions may change with future versions of the\n interpreter, but they are mentioned here for completeness.\n\n Code objects\n Code objects represent *byte-compiled* executable Python code,\n or *bytecode*. The difference between a code object and a\n function object is that the function object contains an explicit\n reference to the function\'s globals (the module in which it was\n defined), while a code object contains no context; also the\n default argument values are stored in the function object, not\n in the code object (because they represent values calculated at\n run-time). 
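The split of responsibilities between a function object and its code object, as described above, can be seen directly (Python 2 attribute names; ``f`` is an invented example):

    >>> def f(a, b=10):
    ...     return a + b
    ...
    >>> f.func_defaults                 # default values live on the function object
    (10,)
    >>> f.func_globals is globals()     # so does the reference to the module namespace
    True
    >>> f.func_code.co_varnames         # the code object records only names, no context
    ('a', 'b')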
Unlike function objects, code objects are immutable\n and contain no references (directly or indirectly) to mutable\n objects.\n\n Special read-only attributes: ``co_name`` gives the function\n name; ``co_argcount`` is the number of positional arguments\n (including arguments with default values); ``co_nlocals`` is the\n number of local variables used by the function (including\n arguments); ``co_varnames`` is a tuple containing the names of\n the local variables (starting with the argument names);\n ``co_cellvars`` is a tuple containing the names of local\n variables that are referenced by nested functions;\n ``co_freevars`` is a tuple containing the names of free\n variables; ``co_code`` is a string representing the sequence of\n bytecode instructions; ``co_consts`` is a tuple containing the\n literals used by the bytecode; ``co_names`` is a tuple\n containing the names used by the bytecode; ``co_filename`` is\n the filename from which the code was compiled;\n ``co_firstlineno`` is the first line number of the function;\n ``co_lnotab`` is a string encoding the mapping from bytecode\n offsets to line numbers (for details see the source code of the\n interpreter); ``co_stacksize`` is the required stack size\n (including local variables); ``co_flags`` is an integer encoding\n a number of flags for the interpreter.\n\n The following flag bits are defined for ``co_flags``: bit\n ``0x04`` is set if the function uses the ``*arguments`` syntax\n to accept an arbitrary number of positional arguments; bit\n ``0x08`` is set if the function uses the ``**keywords`` syntax\n to accept arbitrary keyword arguments; bit ``0x20`` is set if\n the function is a generator.\n\n Future feature declarations (``from __future__ import\n division``) also use bits in ``co_flags`` to indicate whether a\n code object was compiled with a particular feature enabled: bit\n ``0x2000`` is set if the function was compiled with future\n division enabled; bits ``0x10`` and ``0x1000`` were used in\n earlier versions of Python.\n\n Other bits in ``co_flags`` are reserved for internal use.\n\n If a code object represents a function, the first item in\n ``co_consts`` is the documentation string of the function, or\n ``None`` if undefined.\n\n Frame objects\n Frame objects represent execution frames. They may occur in\n traceback objects (see below).\n\n Special read-only attributes: ``f_back`` is to the previous\n stack frame (towards the caller), or ``None`` if this is the\n bottom stack frame; ``f_code`` is the code object being executed\n in this frame; ``f_locals`` is the dictionary used to look up\n local variables; ``f_globals`` is used for global variables;\n ``f_builtins`` is used for built-in (intrinsic) names;\n ``f_restricted`` is a flag indicating whether the function is\n executing in restricted execution mode; ``f_lasti`` gives the\n precise instruction (this is an index into the bytecode string\n of the code object).\n\n Special writable attributes: ``f_trace``, if not ``None``, is a\n function called at the start of each source code line (this is\n used by the debugger); ``f_exc_type``, ``f_exc_value``,\n ``f_exc_traceback`` represent the last exception raised in the\n parent frame provided another exception was ever raised in the\n current frame (in all other cases they are None); ``f_lineno``\n is the current line number of the frame --- writing to this from\n within a trace function jumps to the given line (only for the\n bottom-most frame). 
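As a quick check of the ``co_flags`` bits listed above (an invented generator ``gen`` that also takes ``*args`` and ``**kwargs``):

    >>> def gen(a, b=1, *args, **kwargs):
    ...     yield a
    ...
    >>> code = gen.func_code
    >>> code.co_name, code.co_argcount
    ('gen', 2)
    >>> bool(code.co_flags & 0x04), bool(code.co_flags & 0x08)   # *arguments, **keywords
    (True, True)
    >>> bool(code.co_flags & 0x20)                               # generator flag
    True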
A debugger can implement a Jump command\n (aka Set Next Statement) by writing to f_lineno.\n\n Traceback objects\n Traceback objects represent a stack trace of an exception. A\n traceback object is created when an exception occurs. When the\n search for an exception handler unwinds the execution stack, at\n each unwound level a traceback object is inserted in front of\n the current traceback. When an exception handler is entered,\n the stack trace is made available to the program. (See section\n *The try statement*.) It is accessible as ``sys.exc_traceback``,\n and also as the third item of the tuple returned by\n ``sys.exc_info()``. The latter is the preferred interface,\n since it works correctly when the program is using multiple\n threads. When the program contains no suitable handler, the\n stack trace is written (nicely formatted) to the standard error\n stream; if the interpreter is interactive, it is also made\n available to the user as ``sys.last_traceback``.\n\n Special read-only attributes: ``tb_next`` is the next level in\n the stack trace (towards the frame where the exception\n occurred), or ``None`` if there is no next level; ``tb_frame``\n points to the execution frame of the current level;\n ``tb_lineno`` gives the line number where the exception\n occurred; ``tb_lasti`` indicates the precise instruction. The\n line number and last instruction in the traceback may differ\n from the line number of its frame object if the exception\n occurred in a ``try`` statement with no matching except clause\n or with a finally clause.\n\n Slice objects\n Slice objects are used to represent slices when *extended slice\n syntax* is used. This is a slice using two colons, or multiple\n slices or ellipses separated by commas, e.g., ``a[i:j:step]``,\n ``a[i:j, k:l]``, or ``a[..., i:j]``. They are also created by\n the built-in ``slice()`` function.\n\n Special read-only attributes: ``start`` is the lower bound;\n ``stop`` is the upper bound; ``step`` is the step value; each is\n ``None`` if omitted. These attributes can have any type.\n\n Slice objects support one method:\n\n slice.indices(self, length)\n\n This method takes a single integer argument *length* and\n computes information about the extended slice that the slice\n object would describe if applied to a sequence of *length*\n items. It returns a tuple of three integers; respectively\n these are the *start* and *stop* indices and the *step* or\n stride length of the slice. Missing or out-of-bounds indices\n are handled in a manner consistent with regular slices.\n\n New in version 2.3.\n\n Static method objects\n Static method objects provide a way of defeating the\n transformation of function objects to method objects described\n above. A static method object is a wrapper around any other\n object, usually a user-defined method object. When a static\n method object is retrieved from a class or a class instance, the\n object actually returned is the wrapped object, which is not\n subject to any further transformation. Static method objects are\n not themselves callable, although the objects they wrap usually\n are. Static method objects are created by the built-in\n ``staticmethod()`` constructor.\n\n Class method objects\n A class method object, like a static method object, is a wrapper\n around another object that alters the way in which that object\n is retrieved from classes and class instances. The behaviour of\n class method objects upon such retrieval is described above,\n under "User-defined methods". 
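The static and class method wrappers described above behave as follows in a minimal sketch (``Counter`` is an invented class; decorator syntax is just a convenient way to apply the two built-in constructors):

    >>> class Counter(object):
    ...     count = 0
    ...     @classmethod
    ...     def make(cls):               # the class itself becomes im_self
    ...         cls.count += 1
    ...         return cls()
    ...     @staticmethod
    ...     def describe():              # returned unwrapped, no transformation
    ...         return 'a counter'
    ...
    >>> c = Counter.make()
    >>> Counter.count, c.describe(), Counter.describe()
    (1, 'a counter', 'a counter')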
Class method objects are created\n by the built-in ``classmethod()`` constructor.\n', 'typesfunctions': '\nFunctions\n*********\n\nFunction objects are created by function definitions. The only\noperation on a function object is to call it: ``func(argument-list)``.\n\nThere are really two flavors of function objects: built-in functions\nand user-defined functions. Both support the same operation (to call\nthe function), but the implementation is different, hence the\ndifferent object types.\n\nSee *Function definitions* for more information.\n', 'typesmapping': '\nMapping Types --- ``dict``\n**************************\n\nA *mapping* object maps *hashable* values to arbitrary objects.\nMappings are mutable objects. There is currently only one standard\nmapping type, the *dictionary*. (For other containers see the built\nin ``list``, ``set``, and ``tuple`` classes, and the ``collections``\nmodule.)\n\nA dictionary\'s keys are *almost* arbitrary values. Values that are\nnot *hashable*, that is, values containing lists, dictionaries or\nother mutable types (that are compared by value rather than by object\nidentity) may not be used as keys. Numeric types used for keys obey\nthe normal rules for numeric comparison: if two numbers compare equal\n(such as ``1`` and ``1.0``) then they can be used interchangeably to\nindex the same dictionary entry. (Note however, that since computers\nstore floating-point numbers as approximations it is usually unwise to\nuse them as dictionary keys.)\n\nDictionaries can be created by placing a comma-separated list of\n``key: value`` pairs within braces, for example: ``{\'jack\': 4098,\n\'sjoerd\': 4127}`` or ``{4098: \'jack\', 4127: \'sjoerd\'}``, or by the\n``dict`` constructor.\n\nclass class dict(**kwarg)\nclass class dict(mapping, **kwarg)\nclass class dict(iterable, **kwarg)\n\n Return a new dictionary initialized from an optional positional\n argument and a possibly empty set of keyword arguments.\n\n If no positional argument is given, an empty dictionary is created.\n If a positional argument is given and it is a mapping object, a\n dictionary is created with the same key-value pairs as the mapping\n object. Otherwise, the positional argument must be an *iterator*\n object. Each item in the iterable must itself be an iterator with\n exactly two objects. The first object of each item becomes a key\n in the new dictionary, and the second object the corresponding\n value. If a key occurs more than once, the last value for that key\n becomes the corresponding value in the new dictionary.\n\n If keyword arguments are given, the keyword arguments and their\n values are added to the dictionary created from the positional\n argument. If a key being added is already present, the value from\n the keyword argument replaces the value from the positional\n argument.\n\n To illustrate, the following examples all return a dictionary equal\n to ``{"one": 1, "two": 2, "three": 3}``:\n\n >>> a = dict(one=1, two=2, three=3)\n >>> b = {\'one\': 1, \'two\': 2, \'three\': 3}\n >>> c = dict(zip([\'one\', \'two\', \'three\'], [1, 2, 3]))\n >>> d = dict([(\'two\', 2), (\'one\', 1), (\'three\', 3)])\n >>> e = dict({\'three\': 3, \'one\': 1, \'two\': 2})\n >>> a == b == c == d == e\n True\n\n Providing keyword arguments as in the first example only works for\n keys that are valid Python identifiers. 
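The constructor forms and the key-equality rule above can be condensed into a few lines (values are made up; comparisons are used instead of printing dictionaries so that no key ordering is assumed):

    >>> dict(one=1, two=2) == {'one': 1, 'two': 2}      # keyword form: identifier keys only
    True
    >>> dict([(1, 'a'), ((2, 3), 'b')]) == {1: 'a', (2, 3): 'b'}   # any hashable key via pairs
    True
    >>> d = {1: 'int'}
    >>> d[1.0]              # 1 and 1.0 compare equal, so they index the same entry
    'int'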
Otherwise, any valid keys\n can be used.\n\n New in version 2.2.\n\n Changed in version 2.3: Support for building a dictionary from\n keyword arguments added.\n\n These are the operations that dictionaries support (and therefore,\n custom mapping types should support too):\n\n len(d)\n\n Return the number of items in the dictionary *d*.\n\n d[key]\n\n Return the item of *d* with key *key*. Raises a ``KeyError`` if\n *key* is not in the map.\n\n New in version 2.5: If a subclass of dict defines a method\n ``__missing__()``, if the key *key* is not present, the\n ``d[key]`` operation calls that method with the key *key* as\n argument. The ``d[key]`` operation then returns or raises\n whatever is returned or raised by the ``__missing__(key)`` call\n if the key is not present. No other operations or methods invoke\n ``__missing__()``. If ``__missing__()`` is not defined,\n ``KeyError`` is raised. ``__missing__()`` must be a method; it\n cannot be an instance variable. For an example, see\n ``collections.defaultdict``.\n\n d[key] = value\n\n Set ``d[key]`` to *value*.\n\n del d[key]\n\n Remove ``d[key]`` from *d*. Raises a ``KeyError`` if *key* is\n not in the map.\n\n key in d\n\n Return ``True`` if *d* has a key *key*, else ``False``.\n\n New in version 2.2.\n\n key not in d\n\n Equivalent to ``not key in d``.\n\n New in version 2.2.\n\n iter(d)\n\n Return an iterator over the keys of the dictionary. This is a\n shortcut for ``iterkeys()``.\n\n clear()\n\n Remove all items from the dictionary.\n\n copy()\n\n Return a shallow copy of the dictionary.\n\n fromkeys(seq[, value])\n\n Create a new dictionary with keys from *seq* and values set to\n *value*.\n\n ``fromkeys()`` is a class method that returns a new dictionary.\n *value* defaults to ``None``.\n\n New in version 2.3.\n\n get(key[, default])\n\n Return the value for *key* if *key* is in the dictionary, else\n *default*. If *default* is not given, it defaults to ``None``,\n so that this method never raises a ``KeyError``.\n\n has_key(key)\n\n Test for the presence of *key* in the dictionary. ``has_key()``\n is deprecated in favor of ``key in d``.\n\n items()\n\n Return a copy of the dictionary\'s list of ``(key, value)``\n pairs.\n\n **CPython implementation detail:** Keys and values are listed in\n an arbitrary order which is non-random, varies across Python\n implementations, and depends on the dictionary\'s history of\n insertions and deletions.\n\n If ``items()``, ``keys()``, ``values()``, ``iteritems()``,\n ``iterkeys()``, and ``itervalues()`` are called with no\n intervening modifications to the dictionary, the lists will\n directly correspond. This allows the creation of ``(value,\n key)`` pairs using ``zip()``: ``pairs = zip(d.values(),\n d.keys())``. The same relationship holds for the ``iterkeys()``\n and ``itervalues()`` methods: ``pairs = zip(d.itervalues(),\n d.iterkeys())`` provides the same value for ``pairs``. Another\n way to create the same list is ``pairs = [(v, k) for (k, v) in\n d.iteritems()]``.\n\n iteritems()\n\n Return an iterator over the dictionary\'s ``(key, value)`` pairs.\n See the note for ``dict.items()``.\n\n Using ``iteritems()`` while adding or deleting entries in the\n dictionary may raise a ``RuntimeError`` or fail to iterate over\n all entries.\n\n New in version 2.2.\n\n iterkeys()\n\n Return an iterator over the dictionary\'s keys. 
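A short sketch of the ``__missing__()`` hook and the deprecated ``has_key()`` described above (``ZeroDict`` is an invented subclass, not a standard type):

    >>> class ZeroDict(dict):
    ...     def __missing__(self, key):      # called by d[key] for absent keys
    ...         return 0
    ...
    >>> tally = ZeroDict(spam=2)
    >>> tally['spam'], tally['eggs']
    (2, 0)
    >>> tally.get('eggs', -1)                # get() never calls __missing__()
    -1
    >>> tally.has_key('spam')                # deprecated; prefer 'spam' in tally
    True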
See the note for\n ``dict.items()``.\n\n Using ``iterkeys()`` while adding or deleting entries in the\n dictionary may raise a ``RuntimeError`` or fail to iterate over\n all entries.\n\n New in version 2.2.\n\n itervalues()\n\n Return an iterator over the dictionary\'s values. See the note\n for ``dict.items()``.\n\n Using ``itervalues()`` while adding or deleting entries in the\n dictionary may raise a ``RuntimeError`` or fail to iterate over\n all entries.\n\n New in version 2.2.\n\n keys()\n\n Return a copy of the dictionary\'s list of keys. See the note\n for ``dict.items()``.\n\n pop(key[, default])\n\n If *key* is in the dictionary, remove it and return its value,\n else return *default*. If *default* is not given and *key* is\n not in the dictionary, a ``KeyError`` is raised.\n\n New in version 2.3.\n\n popitem()\n\n Remove and return an arbitrary ``(key, value)`` pair from the\n dictionary.\n\n ``popitem()`` is useful to destructively iterate over a\n dictionary, as often used in set algorithms. If the dictionary\n is empty, calling ``popitem()`` raises a ``KeyError``.\n\n setdefault(key[, default])\n\n If *key* is in the dictionary, return its value. If not, insert\n *key* with a value of *default* and return *default*. *default*\n defaults to ``None``.\n\n update([other])\n\n Update the dictionary with the key/value pairs from *other*,\n overwriting existing keys. Return ``None``.\n\n ``update()`` accepts either another dictionary object or an\n iterable of key/value pairs (as tuples or other iterables of\n length two). If keyword arguments are specified, the dictionary\n is then updated with those key/value pairs: ``d.update(red=1,\n blue=2)``.\n\n Changed in version 2.4: Allowed the argument to be an iterable\n of key/value pairs and allowed keyword arguments.\n\n values()\n\n Return a copy of the dictionary\'s list of values. See the note\n for ``dict.items()``.\n\n viewitems()\n\n Return a new view of the dictionary\'s items (``(key, value)``\n pairs). See below for documentation of view objects.\n\n New in version 2.7.\n\n viewkeys()\n\n Return a new view of the dictionary\'s keys. See below for\n documentation of view objects.\n\n New in version 2.7.\n\n viewvalues()\n\n Return a new view of the dictionary\'s values. See below for\n documentation of view objects.\n\n New in version 2.7.\n\n\nDictionary view objects\n=======================\n\nThe objects returned by ``dict.viewkeys()``, ``dict.viewvalues()`` and\n``dict.viewitems()`` are *view objects*. They provide a dynamic view\non the dictionary\'s entries, which means that when the dictionary\nchanges, the view reflects these changes.\n\nDictionary views can be iterated over to yield their respective data,\nand support membership tests:\n\nlen(dictview)\n\n Return the number of entries in the dictionary.\n\niter(dictview)\n\n Return an iterator over the keys, values or items (represented as\n tuples of ``(key, value)``) in the dictionary.\n\n Keys and values are iterated over in an arbitrary order which is\n non-random, varies across Python implementations, and depends on\n the dictionary\'s history of insertions and deletions. If keys,\n values and items views are iterated over with no intervening\n modifications to the dictionary, the order of items will directly\n correspond. This allows the creation of ``(value, key)`` pairs\n using ``zip()``: ``pairs = zip(d.values(), d.keys())``. 
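Before the item/key correspondence note continues, here is a small sketch of ``setdefault()``, a dynamic ``viewkeys()`` view, and the ``zip()`` pairing mentioned above (dictionary contents are invented):

    >>> d = {'eggs': 2, 'bacon': 1}
    >>> d.setdefault('spam', 500)
    500
    >>> keys = d.viewkeys()
    >>> 'spam' in keys
    True
    >>> del d['spam']
    >>> 'spam' in keys                       # the view reflects the change
    False
    >>> sorted(zip(d.values(), d.keys())) == sorted((v, k) for k, v in d.items())
    True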
Another\n way to create the same list is ``pairs = [(v, k) for (k, v) in\n d.items()]``.\n\n Iterating views while adding or deleting entries in the dictionary\n may raise a ``RuntimeError`` or fail to iterate over all entries.\n\nx in dictview\n\n Return ``True`` if *x* is in the underlying dictionary\'s keys,\n values or items (in the latter case, *x* should be a ``(key,\n value)`` tuple).\n\nKeys views are set-like since their entries are unique and hashable.\nIf all values are hashable, so that (key, value) pairs are unique and\nhashable, then the items view is also set-like. (Values views are not\ntreated as set-like since the entries are generally not unique.) Then\nthese set operations are available ("other" refers either to another\nview or a set):\n\ndictview & other\n\n Return the intersection of the dictview and the other object as a\n new set.\n\ndictview | other\n\n Return the union of the dictview and the other object as a new set.\n\ndictview - other\n\n Return the difference between the dictview and the other object\n (all elements in *dictview* that aren\'t in *other*) as a new set.\n\ndictview ^ other\n\n Return the symmetric difference (all elements either in *dictview*\n or *other*, but not in both) of the dictview and the other object\n as a new set.\n\nAn example of dictionary view usage:\n\n >>> dishes = {\'eggs\': 2, \'sausage\': 1, \'bacon\': 1, \'spam\': 500}\n >>> keys = dishes.viewkeys()\n >>> values = dishes.viewvalues()\n\n >>> # iteration\n >>> n = 0\n >>> for val in values:\n ... n += val\n >>> print(n)\n 504\n\n >>> # keys and values are iterated over in the same order\n >>> list(keys)\n [\'eggs\', \'bacon\', \'sausage\', \'spam\']\n >>> list(values)\n [2, 1, 1, 500]\n\n >>> # view objects are dynamic and reflect dict changes\n >>> del dishes[\'eggs\']\n >>> del dishes[\'sausage\']\n >>> list(keys)\n [\'spam\', \'bacon\']\n\n >>> # set operations\n >>> keys & {\'eggs\', \'bacon\', \'salad\'}\n {\'bacon\'}\n', 'typesmethods': '\nMethods\n*******\n\nMethods are functions that are called using the attribute notation.\nThere are two flavors: built-in methods (such as ``append()`` on\nlists) and class instance methods. Built-in methods are described\nwith the types that support them.\n\nThe implementation adds two special read-only attributes to class\ninstance methods: ``m.im_self`` is the object on which the method\noperates, and ``m.im_func`` is the function implementing the method.\nCalling ``m(arg-1, arg-2, ..., arg-n)`` is completely equivalent to\ncalling ``m.im_func(m.im_self, arg-1, arg-2, ..., arg-n)``.\n\nClass instance methods are either *bound* or *unbound*, referring to\nwhether the method was accessed through an instance or a class,\nrespectively. When a method is unbound, its ``im_self`` attribute\nwill be ``None`` and if called, an explicit ``self`` object must be\npassed as the first argument. In this case, ``self`` must be an\ninstance of the unbound method\'s class (or a subclass of that class),\notherwise a ``TypeError`` is raised.\n\nLike function objects, methods objects support getting arbitrary\nattributes. However, since method attributes are actually stored on\nthe underlying function object (``meth.im_func``), setting method\nattributes on either bound or unbound methods is disallowed.\nAttempting to set an attribute on a method results in an\n``AttributeError`` being raised. In order to set a method attribute,\nyou need to explicitly set it on the underlying function object:\n\n >>> class C:\n ... def method(self):\n ... 
pass\n ...\n >>> c = C()\n >>> c.method.whoami = \'my name is method\' # can\'t set on the method\n Traceback (most recent call last):\n File "", line 1, in \n AttributeError: \'instancemethod\' object has no attribute \'whoami\'\n >>> c.method.im_func.whoami = \'my name is method\'\n >>> c.method.whoami\n \'my name is method\'\n\nSee *The standard type hierarchy* for more information.\n', 'typesmodules': "\nModules\n*******\n\nThe only special operation on a module is attribute access:\n``m.name``, where *m* is a module and *name* accesses a name defined\nin *m*'s symbol table. Module attributes can be assigned to. (Note\nthat the ``import`` statement is not, strictly speaking, an operation\non a module object; ``import foo`` does not require a module object\nnamed *foo* to exist, rather it requires an (external) *definition*\nfor a module named *foo* somewhere.)\n\nA special attribute of every module is ``__dict__``. This is the\ndictionary containing the module's symbol table. Modifying this\ndictionary will actually change the module's symbol table, but direct\nassignment to the ``__dict__`` attribute is not possible (you can\nwrite ``m.__dict__['a'] = 1``, which defines ``m.a`` to be ``1``, but\nyou can't write ``m.__dict__ = {}``). Modifying ``__dict__`` directly\nis not recommended.\n\nModules built into the interpreter are written like this: ````. If loaded from a file, they are written as\n````.\n", - 'typesseq': '\nSequence Types --- ``str``, ``unicode``, ``list``, ``tuple``, ``bytearray``, ``buffer``, ``xrange``\n***************************************************************************************************\n\nThere are seven sequence types: strings, Unicode strings, lists,\ntuples, bytearrays, buffers, and xrange objects.\n\nFor other containers see the built in ``dict`` and ``set`` classes,\nand the ``collections`` module.\n\nString literals are written in single or double quotes: ``\'xyzzy\'``,\n``"frobozz"``. See *String literals* for more about string literals.\nUnicode strings are much like strings, but are specified in the syntax\nusing a preceding ``\'u\'`` character: ``u\'abc\'``, ``u"def"``. In\naddition to the functionality described here, there are also string-\nspecific methods described in the *String Methods* section. Lists are\nconstructed with square brackets, separating items with commas: ``[a,\nb, c]``. Tuples are constructed by the comma operator (not within\nsquare brackets), with or without enclosing parentheses, but an empty\ntuple must have the enclosing parentheses, such as ``a, b, c`` or\n``()``. A single item tuple must have a trailing comma, such as\n``(d,)``.\n\nBytearray objects are created with the built-in function\n``bytearray()``.\n\nBuffer objects are not directly supported by Python syntax, but can be\ncreated by calling the built-in function ``buffer()``. They don\'t\nsupport concatenation or repetition.\n\nObjects of type xrange are similar to buffers in that there is no\nspecific syntax to create them, but they are created using the\n``xrange()`` function. They don\'t support slicing, concatenation or\nrepetition, and using ``in``, ``not in``, ``min()`` or ``max()`` on\nthem is inefficient.\n\nMost sequence types support the following operations. The ``in`` and\n``not in`` operations have the same priorities as the comparison\noperations. The ``+`` and ``*`` operations have the same priority as\nthe corresponding numeric operations. 
[3] Additional methods are\nprovided for *Mutable Sequence Types*.\n\nThis table lists the sequence operations sorted in ascending priority\n(operations in the same box have the same priority). In the table,\n*s* and *t* are sequences of the same type; *n*, *i* and *j* are\nintegers:\n\n+--------------------+----------------------------------+------------+\n| Operation | Result | Notes |\n+====================+==================================+============+\n| ``x in s`` | ``True`` if an item of *s* is | (1) |\n| | equal to *x*, else ``False`` | |\n+--------------------+----------------------------------+------------+\n| ``x not in s`` | ``False`` if an item of *s* is | (1) |\n| | equal to *x*, else ``True`` | |\n+--------------------+----------------------------------+------------+\n| ``s + t`` | the concatenation of *s* and *t* | (6) |\n+--------------------+----------------------------------+------------+\n| ``s * n, n * s`` | *n* shallow copies of *s* | (2) |\n| | concatenated | |\n+--------------------+----------------------------------+------------+\n| ``s[i]`` | *i*th item of *s*, origin 0 | (3) |\n+--------------------+----------------------------------+------------+\n| ``s[i:j]`` | slice of *s* from *i* to *j* | (3)(4) |\n+--------------------+----------------------------------+------------+\n| ``s[i:j:k]`` | slice of *s* from *i* to *j* | (3)(5) |\n| | with step *k* | |\n+--------------------+----------------------------------+------------+\n| ``len(s)`` | length of *s* | |\n+--------------------+----------------------------------+------------+\n| ``min(s)`` | smallest item of *s* | |\n+--------------------+----------------------------------+------------+\n| ``max(s)`` | largest item of *s* | |\n+--------------------+----------------------------------+------------+\n| ``s.index(i)`` | index of the first occurence of | |\n| | *i* in *s* | |\n+--------------------+----------------------------------+------------+\n| ``s.count(i)`` | total number of occurences of | |\n| | *i* in *s* | |\n+--------------------+----------------------------------+------------+\n\nSequence types also support comparisons. In particular, tuples and\nlists are compared lexicographically by comparing corresponding\nelements. This means that to compare equal, every element must compare\nequal and the two sequences must be of the same type and have the same\nlength. (For full details see *Comparisons* in the language\nreference.)\n\nNotes:\n\n1. When *s* is a string or Unicode string object the ``in`` and ``not\n in`` operations act like a substring test. In Python versions\n before 2.3, *x* had to be a string of length 1. In Python 2.3 and\n beyond, *x* may be a string of any length.\n\n2. Values of *n* less than ``0`` are treated as ``0`` (which yields an\n empty sequence of the same type as *s*). Note also that the copies\n are shallow; nested structures are not copied. This often haunts\n new Python programmers; consider:\n\n >>> lists = [[]] * 3\n >>> lists\n [[], [], []]\n >>> lists[0].append(3)\n >>> lists\n [[3], [3], [3]]\n\n What has happened is that ``[[]]`` is a one-element list containing\n an empty list, so all three elements of ``[[]] * 3`` are (pointers\n to) this single empty list. Modifying any of the elements of\n ``lists`` modifies this single list. You can create a list of\n different lists this way:\n\n >>> lists = [[] for i in range(3)]\n >>> lists[0].append(3)\n >>> lists[1].append(5)\n >>> lists[2].append(7)\n >>> lists\n [[3], [5], [7]]\n\n3. 
If *i* or *j* is negative, the index is relative to the end of the\n string: ``len(s) + i`` or ``len(s) + j`` is substituted. But note\n that ``-0`` is still ``0``.\n\n4. The slice of *s* from *i* to *j* is defined as the sequence of\n items with index *k* such that ``i <= k < j``. If *i* or *j* is\n greater than ``len(s)``, use ``len(s)``. If *i* is omitted or\n ``None``, use ``0``. If *j* is omitted or ``None``, use\n ``len(s)``. If *i* is greater than or equal to *j*, the slice is\n empty.\n\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\n sequence of items with index ``x = i + n*k`` such that ``0 <= n <\n (j-i)/k``. In other words, the indices are ``i``, ``i+k``,\n ``i+2*k``, ``i+3*k`` and so on, stopping when *j* is reached (but\n never including *j*). If *i* or *j* is greater than ``len(s)``,\n use ``len(s)``. If *i* or *j* are omitted or ``None``, they become\n "end" values (which end depends on the sign of *k*). Note, *k*\n cannot be zero. If *k* is ``None``, it is treated like ``1``.\n\n6. **CPython implementation detail:** If *s* and *t* are both strings,\n some Python implementations such as CPython can usually perform an\n in-place optimization for assignments of the form ``s = s + t`` or\n ``s += t``. When applicable, this optimization makes quadratic\n run-time much less likely. This optimization is both version and\n implementation dependent. For performance sensitive code, it is\n preferable to use the ``str.join()`` method which assures\n consistent linear concatenation performance across versions and\n implementations.\n\n Changed in version 2.4: Formerly, string concatenation never\n occurred in-place.\n\n\nString Methods\n==============\n\nBelow are listed the string methods which both 8-bit strings and\nUnicode objects support. Some of them are also available on\n``bytearray`` objects.\n\nIn addition, Python\'s strings support the sequence type methods\ndescribed in the *Sequence Types --- str, unicode, list, tuple,\nbytearray, buffer, xrange* section. To output formatted strings use\ntemplate strings or the ``%`` operator described in the *String\nFormatting Operations* section. Also, see the ``re`` module for string\nfunctions based on regular expressions.\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.center(width[, fillchar])\n\n Return centered in a string of length *width*. Padding is done\n using the specified *fillchar* (default is a space).\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.decode([encoding[, errors]])\n\n Decodes the string using the codec registered for *encoding*.\n *encoding* defaults to the default string encoding. *errors* may\n be given to set a different error handling scheme. 
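The ``decode()``/``encode()`` round trip described above, sketched under Python 2 (where ``str`` is a byte string), with one non-strict error handler for illustration:

    >>> '\xc3\xa9tat'.decode('utf-8')            # byte string -> unicode
    u'\xe9tat'
    >>> u'\xe9tat'.encode('utf-8')
    '\xc3\xa9tat'
    >>> 'abc\xff'.decode('ascii', 'replace')     # 'replace' substitutes U+FFFD
    u'abc\ufffd'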
The default is\n ``\'strict\'``, meaning that encoding errors raise ``UnicodeError``.\n Other possible values are ``\'ignore\'``, ``\'replace\'`` and any other\n name registered via ``codecs.register_error()``, see section *Codec\n Base Classes*.\n\n New in version 2.2.\n\n Changed in version 2.3: Support for other error handling schemes\n added.\n\n Changed in version 2.7: Support for keyword arguments added.\n\nstr.encode([encoding[, errors]])\n\n Return an encoded version of the string. Default encoding is the\n current default string encoding. *errors* may be given to set a\n different error handling scheme. The default for *errors* is\n ``\'strict\'``, meaning that encoding errors raise a\n ``UnicodeError``. Other possible values are ``\'ignore\'``,\n ``\'replace\'``, ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and\n any other name registered via ``codecs.register_error()``, see\n section *Codec Base Classes*. For a list of possible encodings, see\n section *Standard Encodings*.\n\n New in version 2.0.\n\n Changed in version 2.3: Support for ``\'xmlcharrefreplace\'`` and\n ``\'backslashreplace\'`` and other error handling schemes added.\n\n Changed in version 2.7: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\n Changed in version 2.5: Accept tuples as *suffix*.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by one or more spaces, depending on the current column and the\n given tab size. The column number is reset to zero after each\n newline occurring in the string. If *tabsize* is not given, a tab\n size of ``8`` characters is assumed. This doesn\'t understand other\n non-printing characters or escape sequences.\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. 
Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\n This method of string formatting is the new standard in Python 3,\n and should be preferred to the ``%`` formatting described in\n *String Formatting Operations* in new code.\n\n New in version 2.6.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. The separator between elements is the\n string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. 
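A few of the search, test and formatting methods above in one doctest-style sketch (all literal values are invented):

    >>> '{0} scored {points} points'.format('Ada', points=10)
    'Ada scored 10 points'
    >>> 'spam.py'.endswith(('.py', '.pyc'))      # a tuple of suffixes is accepted
    True
    >>> 'monty'.find('ty'), 'monty'.find('xx')   # find() returns -1 on failure
    (3, -1)
    >>> '-'.join(['a', 'b', 'c']).center(11, '*')
    '***a-b-c***'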
If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\n New in version 2.5.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\n New in version 2.5.\n\nstr.rsplit([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\n New in version 2.4.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.split([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). If *maxsplit* is not specified or ``-1``, then there is\n no limit on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. 
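The left/right splitting variants above differ only in which end the limited splits come from; a brief sketch (invented inputs):

    >>> '1,2,3,4'.split(',', 2)          # at most two splits, from the left
    ['1', '2', '3,4']
    >>> '1,2,3,4'.rsplit(',', 2)         # the same, but the rightmost splits
    ['1,2', '3', '4']
    >>> '1,,2'.split(',')                # consecutive delimiters keep empty strings
    ['1', '', '2']
    >>> 'a.b.c'.rpartition('.')
    ('a.b', '.', 'c')
    >>> 'no-dot'.partition('.')          # separator not found
    ('no-dot', '', '')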
Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. This method uses the *universal newlines* approach to\n splitting lines. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\n For example, ``\'ab c\\n\\nde fg\\rkl\\r\\n\'.splitlines()`` returns\n ``[\'ab c\', \'\', \'de fg\', \'kl\']``, while the same call with\n ``splitlines(True)`` returns ``[\'ab c\\n\', \'\\n\', \'de fg\\r\',\n \'kl\\r\\n\']``.\n\n Unlike ``split()`` when a delimiter string *sep* is given, this\n method returns an empty list for the empty string, and a terminal\n line break does not result in an extra line.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\n Changed in version 2.5: Accept tuples as *prefix*.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.translate(table[, deletechars])\n\n Return a copy of the string where all characters occurring in the\n optional argument *deletechars* are removed, and the remaining\n characters have been mapped through the given translation table,\n which must be a string of length 256.\n\n You can use the ``maketrans()`` helper function in the ``string``\n module to create a translation table. 
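[Editor's aside -- a small sketch of the splitting rules just described, contrasting the whitespace algorithm used when *sep* is ``None`` with an explicit separator, including the empty-string edge cases. The inputs are invented.]

   >>> '  1  2   3  '.split()
   ['1', '2', '3']
   >>> '  1  2   3  '.split(' ')
   ['', '', '1', '', '2', '', '', '3', '', '']
   >>> ''.split()
   []
   >>> ''.split(',')
   ['']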
For string objects, set the\n *table* argument to ``None`` for translations that only delete\n characters:\n\n >>> \'read this short text\'.translate(None, \'aeiou\')\n \'rd ths shrt txt\'\n\n New in version 2.6: Support for a ``None`` *table* argument.\n\n For Unicode objects, the ``translate()`` method does not accept the\n optional *deletechars* argument. Instead, it returns a copy of the\n *s* where all characters have been mapped through the given\n translation table which must be a mapping of Unicode ordinals to\n Unicode ordinals, Unicode strings or ``None``. Unmapped characters\n are left untouched. Characters mapped to ``None`` are deleted.\n Note, a more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see ``encodings.cp1251``\n for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n\n New in version 2.2.2.\n\nThe following methods are present only on unicode objects:\n\nunicode.isnumeric()\n\n Return ``True`` if there are only numeric characters in S,\n ``False`` otherwise. Numeric characters include digit characters,\n and all characters that have the Unicode numeric value property,\n e.g. U+2155, VULGAR FRACTION ONE FIFTH.\n\nunicode.isdecimal()\n\n Return ``True`` if there are only decimal characters in S,\n ``False`` otherwise. Decimal characters include digit characters,\n and all characters that can be used to form decimal-radix numbers,\n e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\n\nString Formatting Operations\n============================\n\nString and Unicode objects have one unique built-in operation: the\n``%`` operator (modulo). This is also known as the string\n*formatting* or *interpolation* operator. Given ``format % values``\n(where *format* is a string or Unicode object), ``%`` conversion\nspecifications in *format* are replaced with zero or more elements of\n*values*. The effect is similar to the using ``sprintf()`` in the C\nlanguage. If *format* is a Unicode object, or if any of the objects\nbeing converted using the ``%s`` conversion are Unicode objects, the\nresult will also be a Unicode object.\n\nIf *format* requires a single argument, *values* may be a single non-\ntuple object. [5] Otherwise, *values* must be a tuple with exactly\nthe number of items specified by the format string, or a single\nmapping object (for example, a dictionary).\n\nA conversion specifier contains two or more characters and has the\nfollowing components, which must occur in this order:\n\n1. The ``\'%\'`` character, which marks the start of the specifier.\n\n2. Mapping key (optional), consisting of a parenthesised sequence of\n characters (for example, ``(somename)``).\n\n3. Conversion flags (optional), which affect the result of some\n conversion types.\n\n4. Minimum field width (optional). 
If specified as an ``\'*\'``\n (asterisk), the actual width is read from the next element of the\n tuple in *values*, and the object to convert comes after the\n minimum field width and optional precision.\n\n5. Precision (optional), given as a ``\'.\'`` (dot) followed by the\n precision. If specified as ``\'*\'`` (an asterisk), the actual width\n is read from the next element of the tuple in *values*, and the\n value to convert comes after the precision.\n\n6. Length modifier (optional).\n\n7. Conversion type.\n\nWhen the right argument is a dictionary (or other mapping type), then\nthe formats in the string *must* include a parenthesised mapping key\ninto that dictionary inserted immediately after the ``\'%\'`` character.\nThe mapping key selects the value to be formatted from the mapping.\nFor example:\n\n>>> print \'%(language)s has %(number)03d quote types.\' % \\\n... {"language": "Python", "number": 2}\nPython has 002 quote types.\n\nIn this case no ``*`` specifiers may occur in a format (since they\nrequire a sequential parameter list).\n\nThe conversion flag characters are:\n\n+-----------+-----------------------------------------------------------------------+\n| Flag | Meaning |\n+===========+=======================================================================+\n| ``\'#\'`` | The value conversion will use the "alternate form" (where defined |\n| | below). |\n+-----------+-----------------------------------------------------------------------+\n| ``\'0\'`` | The conversion will be zero padded for numeric values. |\n+-----------+-----------------------------------------------------------------------+\n| ``\'-\'`` | The converted value is left adjusted (overrides the ``\'0\'`` |\n| | conversion if both are given). |\n+-----------+-----------------------------------------------------------------------+\n| ``\' \'`` | (a space) A blank should be left before a positive number (or empty |\n| | string) produced by a signed conversion. |\n+-----------+-----------------------------------------------------------------------+\n| ``\'+\'`` | A sign character (``\'+\'`` or ``\'-\'``) will precede the conversion |\n| | (overrides a "space" flag). |\n+-----------+-----------------------------------------------------------------------+\n\nA length modifier (``h``, ``l``, or ``L``) may be present, but is\nignored as it is not necessary for Python -- so e.g. ``%ld`` is\nidentical to ``%d``.\n\nThe conversion types are:\n\n+--------------+-------------------------------------------------------+---------+\n| Conversion | Meaning | Notes |\n+==============+=======================================================+=========+\n| ``\'d\'`` | Signed integer decimal. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'i\'`` | Signed integer decimal. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'o\'`` | Signed octal value. | (1) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'u\'`` | Obsolete type -- it is identical to ``\'d\'``. | (7) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'x\'`` | Signed hexadecimal (lowercase). | (2) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'X\'`` | Signed hexadecimal (uppercase). | (2) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'e\'`` | Floating point exponential format (lowercase). 
| (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'E\'`` | Floating point exponential format (uppercase). | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'f\'`` | Floating point decimal format. | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'F\'`` | Floating point decimal format. | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'g\'`` | Floating point format. Uses lowercase exponential | (4) |\n| | format if exponent is less than -4 or not less than | |\n| | precision, decimal format otherwise. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'G\'`` | Floating point format. Uses uppercase exponential | (4) |\n| | format if exponent is less than -4 or not less than | |\n| | precision, decimal format otherwise. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'c\'`` | Single character (accepts integer or single character | |\n| | string). | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'r\'`` | String (converts any Python object using ``repr()``). | (5) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'s\'`` | String (converts any Python object using ``str()``). | (6) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'%\'`` | No argument is converted, results in a ``\'%\'`` | |\n| | character in the result. | |\n+--------------+-------------------------------------------------------+---------+\n\nNotes:\n\n1. The alternate form causes a leading zero (``\'0\'``) to be inserted\n between left-hand padding and the formatting of the number if the\n leading character of the result is not already a zero.\n\n2. The alternate form causes a leading ``\'0x\'`` or ``\'0X\'`` (depending\n on whether the ``\'x\'`` or ``\'X\'`` format was used) to be inserted\n between left-hand padding and the formatting of the number if the\n leading character of the result is not already a zero.\n\n3. The alternate form causes the result to always contain a decimal\n point, even if no digits follow it.\n\n The precision determines the number of digits after the decimal\n point and defaults to 6.\n\n4. The alternate form causes the result to always contain a decimal\n point, and trailing zeroes are not removed as they would otherwise\n be.\n\n The precision determines the number of significant digits before\n and after the decimal point and defaults to 6.\n\n5. The ``%r`` conversion was added in Python 2.0.\n\n The precision determines the maximal number of characters used.\n\n6. If the object or format provided is a ``unicode`` string, the\n resulting string will also be ``unicode``.\n\n The precision determines the maximal number of characters used.\n\n7. See **PEP 237**.\n\nSince Python strings have an explicit length, ``%s`` conversions do\nnot assume that ``\'\\0\'`` is the end of the string.\n\nChanged in version 2.7: ``%f`` conversions for numbers whose absolute\nvalue is over 1e50 are no longer replaced by ``%g`` conversions.\n\nAdditional string operations are defined in standard modules\n``string`` and ``re``.\n\n\nXRange Type\n===========\n\nThe ``xrange`` type is an immutable sequence which is commonly used\nfor looping. 
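[Editor's aside -- an illustrative session for the ``%`` formatting components, flags and conversion types tabulated above, under the Python 2 semantics documented here. The name/score values are invented; they demonstrate a mapping key, zero padding, left adjustment and sign flags, ``*`` width/precision taken from the argument tuple, and the alternate form for ``'x'``.]

   >>> '%(name)s scored %(score)05.1f%%' % {'name': 'Ada', 'score': 97.5}
   'Ada scored 097.5%'
   >>> '%-8s|%+d|% d' % ('left', 3, 3)
   'left    |+3| 3'
   >>> '%*.*f' % (8, 2, 3.14159)
   '    3.14'
   >>> '%#x' % 255
   '0xff'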
The advantage of the ``xrange`` type is that an\n``xrange`` object will always take the same amount of memory, no\nmatter the size of the range it represents. There are no consistent\nperformance advantages.\n\nXRange objects have very little behavior: they only support indexing,\niteration, and the ``len()`` function.\n\n\nMutable Sequence Types\n======================\n\nList and ``bytearray`` objects support additional operations that\nallow in-place modification of the object. Other mutable sequence\ntypes (when added to the language) should also support these\noperations. Strings and tuples are immutable sequence types: such\nobjects cannot be modified once created. The following operations are\ndefined on mutable sequence types (where *x* is an arbitrary object):\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | same as ``s[len(s):len(s)] = | (2) |\n| | [x]`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(x)`` | same as ``s[len(s):len(s)] = x`` | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.count(x)`` | return number of *i*\'s for which | |\n| | ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.index(x[, i[, j]])`` | return smallest *k* such that | (4) |\n| | ``s[k] == x`` and ``i <= k < j`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | same as ``s[i:i] = [x]`` | (5) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | same as ``x = s[i]; del s[i]; | (6) |\n| | return x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | same as ``del s[s.index(x)]`` | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (7) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.sort([cmp[, key[, | sort the items of *s* in place | (7)(8)(9)(10) |\n| reverse]]])`` | | |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. 
The C implementation of Python has historically accepted multiple\n parameters and implicitly joined them into a tuple; this no longer\n works in Python 2.0. Use of this misfeature has been deprecated\n since Python 1.4.\n\n3. *x* can be any iterable object.\n\n4. Raises ``ValueError`` when *x* is not found in *s*. When a negative\n index is passed as the second or third parameter to the ``index()``\n method, the list length is added, as for slice indices. If it is\n still negative, it is truncated to zero, as for slice indices.\n\n Changed in version 2.3: Previously, ``index()`` didn\'t have\n arguments for specifying start and stop positions.\n\n5. When a negative index is passed as the first parameter to the\n ``insert()`` method, the list length is added, as for slice\n indices. If it is still negative, it is truncated to zero, as for\n slice indices.\n\n Changed in version 2.3: Previously, all negative indices were\n truncated to zero.\n\n6. The ``pop()`` method is only supported by the list and array types.\n The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n7. The ``sort()`` and ``reverse()`` methods modify the list in place\n for economy of space when sorting or reversing a large list. To\n remind you that they operate by side effect, they don\'t return the\n sorted or reversed list.\n\n8. The ``sort()`` method takes optional arguments for controlling the\n comparisons.\n\n *cmp* specifies a custom comparison function of two arguments (list\n items) which should return a negative, zero or positive number\n depending on whether the first argument is considered smaller than,\n equal to, or larger than the second argument: ``cmp=lambda x,y:\n cmp(x.lower(), y.lower())``. The default value is ``None``.\n\n *key* specifies a function of one argument that is used to extract\n a comparison key from each list element: ``key=str.lower``. The\n default value is ``None``.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n In general, the *key* and *reverse* conversion processes are much\n faster than specifying an equivalent *cmp* function. This is\n because *cmp* is called multiple times for each list element while\n *key* and *reverse* touch each element only once. Use\n ``functools.cmp_to_key()`` to convert an old-style *cmp* function\n to a *key* function.\n\n Changed in version 2.3: Support for ``None`` as an equivalent to\n omitting *cmp* was added.\n\n Changed in version 2.4: Support for *key* and *reverse* was added.\n\n9. Starting with Python 2.3, the ``sort()`` method is guaranteed to be\n stable. A sort is stable if it guarantees not to change the\n relative order of elements that compare equal --- this is helpful\n for sorting in multiple passes (for example, sort by department,\n then by salary grade).\n\n10. **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. 
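[Editor's aside -- a sketch of the in-place list operations and the ``sort()`` keywords covered in the notes above, including ``functools.cmp_to_key()`` as mentioned there. The lists are invented sample data.]

   >>> s = [0, 1, 2, 3]
   >>> s.insert(-1, 'x')        # -1 + len(s) == 3, so 'x' goes in front of the 3
   >>> s.pop()                  # default index -1: remove and return the last item
   3
   >>> s
   [0, 1, 2, 'x']
   >>> words = ['banana', 'Apple', 'cherry']
   >>> words.sort(key=str.lower)            # case-insensitive, stable
   >>> words
   ['Apple', 'banana', 'cherry']
   >>> words.sort(key=len, reverse=True)    # equal lengths keep their relative order
   >>> words
   ['banana', 'cherry', 'Apple']
   >>> import functools
   >>> words.sort(key=functools.cmp_to_key(cmp))   # old-style cmp adapted to a key
   >>> words
   ['Apple', 'banana', 'cherry']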
The C implementation of Python 2.3 and newer makes the\n list appear empty for the duration, and raises ``ValueError`` if\n it can detect that the list has been mutated during a sort.\n', + 'typesseq': '\nSequence Types --- ``str``, ``unicode``, ``list``, ``tuple``, ``bytearray``, ``buffer``, ``xrange``\n***************************************************************************************************\n\nThere are seven sequence types: strings, Unicode strings, lists,\ntuples, bytearrays, buffers, and xrange objects.\n\nFor other containers see the built in ``dict`` and ``set`` classes,\nand the ``collections`` module.\n\nString literals are written in single or double quotes: ``\'xyzzy\'``,\n``"frobozz"``. See *String literals* for more about string literals.\nUnicode strings are much like strings, but are specified in the syntax\nusing a preceding ``\'u\'`` character: ``u\'abc\'``, ``u"def"``. In\naddition to the functionality described here, there are also string-\nspecific methods described in the *String Methods* section. Lists are\nconstructed with square brackets, separating items with commas: ``[a,\nb, c]``. Tuples are constructed by the comma operator (not within\nsquare brackets), with or without enclosing parentheses, but an empty\ntuple must have the enclosing parentheses, such as ``a, b, c`` or\n``()``. A single item tuple must have a trailing comma, such as\n``(d,)``.\n\nBytearray objects are created with the built-in function\n``bytearray()``.\n\nBuffer objects are not directly supported by Python syntax, but can be\ncreated by calling the built-in function ``buffer()``. They don\'t\nsupport concatenation or repetition.\n\nObjects of type xrange are similar to buffers in that there is no\nspecific syntax to create them, but they are created using the\n``xrange()`` function. They don\'t support slicing, concatenation or\nrepetition, and using ``in``, ``not in``, ``min()`` or ``max()`` on\nthem is inefficient.\n\nMost sequence types support the following operations. The ``in`` and\n``not in`` operations have the same priorities as the comparison\noperations. The ``+`` and ``*`` operations have the same priority as\nthe corresponding numeric operations. [3] Additional methods are\nprovided for *Mutable Sequence Types*.\n\nThis table lists the sequence operations sorted in ascending priority\n(operations in the same box have the same priority). 
In the table,\n*s* and *t* are sequences of the same type; *n*, *i* and *j* are\nintegers:\n\n+--------------------+----------------------------------+------------+\n| Operation | Result | Notes |\n+====================+==================================+============+\n| ``x in s`` | ``True`` if an item of *s* is | (1) |\n| | equal to *x*, else ``False`` | |\n+--------------------+----------------------------------+------------+\n| ``x not in s`` | ``False`` if an item of *s* is | (1) |\n| | equal to *x*, else ``True`` | |\n+--------------------+----------------------------------+------------+\n| ``s + t`` | the concatenation of *s* and *t* | (6) |\n+--------------------+----------------------------------+------------+\n| ``s * n, n * s`` | *n* shallow copies of *s* | (2) |\n| | concatenated | |\n+--------------------+----------------------------------+------------+\n| ``s[i]`` | *i*th item of *s*, origin 0 | (3) |\n+--------------------+----------------------------------+------------+\n| ``s[i:j]`` | slice of *s* from *i* to *j* | (3)(4) |\n+--------------------+----------------------------------+------------+\n| ``s[i:j:k]`` | slice of *s* from *i* to *j* | (3)(5) |\n| | with step *k* | |\n+--------------------+----------------------------------+------------+\n| ``len(s)`` | length of *s* | |\n+--------------------+----------------------------------+------------+\n| ``min(s)`` | smallest item of *s* | |\n+--------------------+----------------------------------+------------+\n| ``max(s)`` | largest item of *s* | |\n+--------------------+----------------------------------+------------+\n| ``s.index(i)`` | index of the first occurence of | |\n| | *i* in *s* | |\n+--------------------+----------------------------------+------------+\n| ``s.count(i)`` | total number of occurences of | |\n| | *i* in *s* | |\n+--------------------+----------------------------------+------------+\n\nSequence types also support comparisons. In particular, tuples and\nlists are compared lexicographically by comparing corresponding\nelements. This means that to compare equal, every element must compare\nequal and the two sequences must be of the same type and have the same\nlength. (For full details see *Comparisons* in the language\nreference.)\n\nNotes:\n\n1. When *s* is a string or Unicode string object the ``in`` and ``not\n in`` operations act like a substring test. In Python versions\n before 2.3, *x* had to be a string of length 1. In Python 2.3 and\n beyond, *x* may be a string of any length.\n\n2. Values of *n* less than ``0`` are treated as ``0`` (which yields an\n empty sequence of the same type as *s*). Note also that the copies\n are shallow; nested structures are not copied. This often haunts\n new Python programmers; consider:\n\n >>> lists = [[]] * 3\n >>> lists\n [[], [], []]\n >>> lists[0].append(3)\n >>> lists\n [[3], [3], [3]]\n\n What has happened is that ``[[]]`` is a one-element list containing\n an empty list, so all three elements of ``[[]] * 3`` are (pointers\n to) this single empty list. Modifying any of the elements of\n ``lists`` modifies this single list. You can create a list of\n different lists this way:\n\n >>> lists = [[] for i in range(3)]\n >>> lists[0].append(3)\n >>> lists[1].append(5)\n >>> lists[2].append(7)\n >>> lists\n [[3], [5], [7]]\n\n3. If *i* or *j* is negative, the index is relative to the end of the\n string: ``len(s) + i`` or ``len(s) + j`` is substituted. But note\n that ``-0`` is still ``0``.\n\n4. 
The slice of *s* from *i* to *j* is defined as the sequence of\n items with index *k* such that ``i <= k < j``. If *i* or *j* is\n greater than ``len(s)``, use ``len(s)``. If *i* is omitted or\n ``None``, use ``0``. If *j* is omitted or ``None``, use\n ``len(s)``. If *i* is greater than or equal to *j*, the slice is\n empty.\n\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\n sequence of items with index ``x = i + n*k`` such that ``0 <= n <\n (j-i)/k``. In other words, the indices are ``i``, ``i+k``,\n ``i+2*k``, ``i+3*k`` and so on, stopping when *j* is reached (but\n never including *j*). If *i* or *j* is greater than ``len(s)``,\n use ``len(s)``. If *i* or *j* are omitted or ``None``, they become\n "end" values (which end depends on the sign of *k*). Note, *k*\n cannot be zero. If *k* is ``None``, it is treated like ``1``.\n\n6. **CPython implementation detail:** If *s* and *t* are both strings,\n some Python implementations such as CPython can usually perform an\n in-place optimization for assignments of the form ``s = s + t`` or\n ``s += t``. When applicable, this optimization makes quadratic\n run-time much less likely. This optimization is both version and\n implementation dependent. For performance sensitive code, it is\n preferable to use the ``str.join()`` method which assures\n consistent linear concatenation performance across versions and\n implementations.\n\n Changed in version 2.4: Formerly, string concatenation never\n occurred in-place.\n\n\nString Methods\n==============\n\nBelow are listed the string methods which both 8-bit strings and\nUnicode objects support. Some of them are also available on\n``bytearray`` objects.\n\nIn addition, Python\'s strings support the sequence type methods\ndescribed in the *Sequence Types --- str, unicode, list, tuple,\nbytearray, buffer, xrange* section. To output formatted strings use\ntemplate strings or the ``%`` operator described in the *String\nFormatting Operations* section. Also, see the ``re`` module for string\nfunctions based on regular expressions.\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.center(width[, fillchar])\n\n Return centered in a string of length *width*. Padding is done\n using the specified *fillchar* (default is a space).\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.decode([encoding[, errors]])\n\n Decodes the string using the codec registered for *encoding*.\n *encoding* defaults to the default string encoding. *errors* may\n be given to set a different error handling scheme. The default is\n ``\'strict\'``, meaning that encoding errors raise ``UnicodeError``.\n Other possible values are ``\'ignore\'``, ``\'replace\'`` and any other\n name registered via ``codecs.register_error()``, see section *Codec\n Base Classes*.\n\n New in version 2.2.\n\n Changed in version 2.3: Support for other error handling schemes\n added.\n\n Changed in version 2.7: Support for keyword arguments added.\n\nstr.encode([encoding[, errors]])\n\n Return an encoded version of the string. Default encoding is the\n current default string encoding. *errors* may be given to set a\n different error handling scheme. 
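[Editor's aside -- an illustration of the common sequence operations and the slicing notes above, using a throwaway sample string.]

   >>> s = 'bookkeeper'
   >>> 'kee' in s                 # substring test for strings
   True
   >>> s[0:4], s[-4:], s[::2], s[::-1]
   ('book', 'eper', 'bokee', 'repeekkoob')
   >>> 3 * 'ab'
   'ababab'
   >>> min(s), max(s), s.count('e'), s.index('k')
   ('b', 'r', 3, 3)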
The default for *errors* is\n ``\'strict\'``, meaning that encoding errors raise a\n ``UnicodeError``. Other possible values are ``\'ignore\'``,\n ``\'replace\'``, ``\'xmlcharrefreplace\'``, ``\'backslashreplace\'`` and\n any other name registered via ``codecs.register_error()``, see\n section *Codec Base Classes*. For a list of possible encodings, see\n section *Standard Encodings*.\n\n New in version 2.0.\n\n Changed in version 2.3: Support for ``\'xmlcharrefreplace\'`` and\n ``\'backslashreplace\'`` and other error handling schemes added.\n\n Changed in version 2.7: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return ``True`` if the string ends with the specified *suffix*,\n otherwise return ``False``. *suffix* can also be a tuple of\n suffixes to look for. With optional *start*, test beginning at\n that position. With optional *end*, stop comparing at that\n position.\n\n Changed in version 2.5: Accept tuples as *suffix*.\n\nstr.expandtabs([tabsize])\n\n Return a copy of the string where all tab characters are replaced\n by one or more spaces, depending on the current column and the\n given tab size. Tab positions occur every *tabsize* characters\n (default is 8, giving tab positions at columns 0, 8, 16 and so on).\n To expand the string, the current column is set to zero and the\n string is examined character by character. If the character is a\n tab (``\\t``), one or more space characters are inserted in the\n result until the current column is equal to the next tab position.\n (The tab character itself is not copied.) If the character is a\n newline (``\\n``) or return (``\\r``), it is copied and the current\n column is reset to zero. Any other character is copied unchanged\n and the current column is incremented by one regardless of how the\n character is represented when printed.\n\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs()\n \'01 012 0123 01234\'\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs(4)\n \'01 012 0123 01234\'\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` if *sub* is not found.\n\n Note: The ``find()`` method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the ``in`` operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces ``{}``. Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. 
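[Editor's aside -- a sketch of ``encode()``/``decode()`` error handling and the tuple forms of ``endswith()``, plus ``expandtabs()``, as described above. The byte values shown assume the UTF-8 encoding of ``u'caf\xe9'`` and the interpreter's default repr.]

   >>> u'caf\xe9'.encode('utf-8')
   'caf\xc3\xa9'
   >>> 'caf\xc3\xa9'.decode('utf-8')
   u'caf\xe9'
   >>> u'caf\xe9'.encode('ascii', 'replace')
   'caf?'
   >>> 'archive.tar.gz'.endswith(('.gz', '.bz2'))
   True
   >>> 'a\tbc\td'.expandtabs(4)
   'a   bc  d'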
Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\n This method of string formatting is the new standard in Python 3,\n and should be preferred to the ``%`` formatting described in\n *String Formatting Operations* in new code.\n\n New in version 2.6.\n\nstr.index(sub[, start[, end]])\n\n Like ``find()``, but raise ``ValueError`` when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. The separator between elements is the\n string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. 
If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\n New in version 2.5.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within ``s[start:end]``.\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return ``-1`` on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\n is not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to ``len(s)``.\n\n Changed in version 2.4: Support for the *fillchar* argument.\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\n New in version 2.5.\n\nstr.rsplit([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n ``None``, any whitespace string is a separator. Except for\n splitting from the right, ``rsplit()`` behaves like ``split()``\n which is described in detail below.\n\n New in version 2.4.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or ``None``, the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.split([sep[, maxsplit]])\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most ``maxsplit+1``\n elements). If *maxsplit* is not specified or ``-1``, then there is\n no limit on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n ``\'1,,2\'.split(\',\')`` returns ``[\'1\', \'\', \'2\']``). The *sep*\n argument may consist of multiple characters (for example,\n ``\'1<>2<>3\'.split(\'<>\')`` returns ``[\'1\', \'2\', \'3\']``). Splitting\n an empty string with a specified separator returns ``[\'\']``.\n\n If *sep* is not specified or is ``None``, a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. 
Consequently, splitting an empty string or a string\n consisting of just whitespace with a ``None`` separator returns\n ``[]``.\n\n For example, ``\' 1 2 3 \'.split()`` returns ``[\'1\', \'2\', \'3\']``,\n and ``\' 1 2 3 \'.split(None, 1)`` returns ``[\'1\', \'2 3 \']``.\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. This method uses the *universal newlines* approach to\n splitting lines. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\n For example, ``\'ab c\\n\\nde fg\\rkl\\r\\n\'.splitlines()`` returns\n ``[\'ab c\', \'\', \'de fg\', \'kl\']``, while the same call with\n ``splitlines(True)`` returns ``[\'ab c\\n\', \'\\n\', \'de fg\\r\',\n \'kl\\r\\n\']``.\n\n Unlike ``split()`` when a delimiter string *sep* is given, this\n method returns an empty list for the empty string, and a terminal\n line break does not result in an extra line.\n\nstr.startswith(prefix[, start[, end]])\n\n Return ``True`` if string starts with the *prefix*, otherwise\n return ``False``. *prefix* can also be a tuple of prefixes to look\n for. With optional *start*, test string beginning at that\n position. With optional *end*, stop comparing string at that\n position.\n\n Changed in version 2.5: Accept tuples as *prefix*.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or ``None``, the\n *chars* argument defaults to removing whitespace. The *chars*\n argument is not a prefix or suffix; rather, all combinations of its\n values are stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\n Changed in version 2.2.2: Support for the *chars* argument.\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa.\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.translate(table[, deletechars])\n\n Return a copy of the string where all characters occurring in the\n optional argument *deletechars* are removed, and the remaining\n characters have been mapped through the given translation table,\n which must be a string of length 256.\n\n You can use the ``maketrans()`` helper function in the ``string``\n module to create a translation table. 
For string objects, set the\n *table* argument to ``None`` for translations that only delete\n characters:\n\n >>> \'read this short text\'.translate(None, \'aeiou\')\n \'rd ths shrt txt\'\n\n New in version 2.6: Support for a ``None`` *table* argument.\n\n For Unicode objects, the ``translate()`` method does not accept the\n optional *deletechars* argument. Instead, it returns a copy of the\n *s* where all characters have been mapped through the given\n translation table which must be a mapping of Unicode ordinals to\n Unicode ordinals, Unicode strings or ``None``. Unmapped characters\n are left untouched. Characters mapped to ``None`` are deleted.\n Note, a more flexible approach is to create a custom character\n mapping codec using the ``codecs`` module (see ``encodings.cp1251``\n for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that ``str.upper().isupper()`` might\n be ``False`` if ``s`` contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n For 8-bit strings, this method is locale-dependent.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to ``len(s)``.\n\n New in version 2.2.2.\n\nThe following methods are present only on unicode objects:\n\nunicode.isnumeric()\n\n Return ``True`` if there are only numeric characters in S,\n ``False`` otherwise. Numeric characters include digit characters,\n and all characters that have the Unicode numeric value property,\n e.g. U+2155, VULGAR FRACTION ONE FIFTH.\n\nunicode.isdecimal()\n\n Return ``True`` if there are only decimal characters in S,\n ``False`` otherwise. Decimal characters include digit characters,\n and all characters that can be used to form decimal-radix numbers,\n e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\n\nString Formatting Operations\n============================\n\nString and Unicode objects have one unique built-in operation: the\n``%`` operator (modulo). This is also known as the string\n*formatting* or *interpolation* operator. Given ``format % values``\n(where *format* is a string or Unicode object), ``%`` conversion\nspecifications in *format* are replaced with zero or more elements of\n*values*. The effect is similar to the using ``sprintf()`` in the C\nlanguage. If *format* is a Unicode object, or if any of the objects\nbeing converted using the ``%s`` conversion are Unicode objects, the\nresult will also be a Unicode object.\n\nIf *format* requires a single argument, *values* may be a single non-\ntuple object. [5] Otherwise, *values* must be a tuple with exactly\nthe number of items specified by the format string, or a single\nmapping object (for example, a dictionary).\n\nA conversion specifier contains two or more characters and has the\nfollowing components, which must occur in this order:\n\n1. The ``\'%\'`` character, which marks the start of the specifier.\n\n2. Mapping key (optional), consisting of a parenthesised sequence of\n characters (for example, ``(somename)``).\n\n3. Conversion flags (optional), which affect the result of some\n conversion types.\n\n4. Minimum field width (optional). 
If specified as an ``\'*\'``\n (asterisk), the actual width is read from the next element of the\n tuple in *values*, and the object to convert comes after the\n minimum field width and optional precision.\n\n5. Precision (optional), given as a ``\'.\'`` (dot) followed by the\n precision. If specified as ``\'*\'`` (an asterisk), the actual width\n is read from the next element of the tuple in *values*, and the\n value to convert comes after the precision.\n\n6. Length modifier (optional).\n\n7. Conversion type.\n\nWhen the right argument is a dictionary (or other mapping type), then\nthe formats in the string *must* include a parenthesised mapping key\ninto that dictionary inserted immediately after the ``\'%\'`` character.\nThe mapping key selects the value to be formatted from the mapping.\nFor example:\n\n>>> print \'%(language)s has %(number)03d quote types.\' % \\\n... {"language": "Python", "number": 2}\nPython has 002 quote types.\n\nIn this case no ``*`` specifiers may occur in a format (since they\nrequire a sequential parameter list).\n\nThe conversion flag characters are:\n\n+-----------+-----------------------------------------------------------------------+\n| Flag | Meaning |\n+===========+=======================================================================+\n| ``\'#\'`` | The value conversion will use the "alternate form" (where defined |\n| | below). |\n+-----------+-----------------------------------------------------------------------+\n| ``\'0\'`` | The conversion will be zero padded for numeric values. |\n+-----------+-----------------------------------------------------------------------+\n| ``\'-\'`` | The converted value is left adjusted (overrides the ``\'0\'`` |\n| | conversion if both are given). |\n+-----------+-----------------------------------------------------------------------+\n| ``\' \'`` | (a space) A blank should be left before a positive number (or empty |\n| | string) produced by a signed conversion. |\n+-----------+-----------------------------------------------------------------------+\n| ``\'+\'`` | A sign character (``\'+\'`` or ``\'-\'``) will precede the conversion |\n| | (overrides a "space" flag). |\n+-----------+-----------------------------------------------------------------------+\n\nA length modifier (``h``, ``l``, or ``L``) may be present, but is\nignored as it is not necessary for Python -- so e.g. ``%ld`` is\nidentical to ``%d``.\n\nThe conversion types are:\n\n+--------------+-------------------------------------------------------+---------+\n| Conversion | Meaning | Notes |\n+==============+=======================================================+=========+\n| ``\'d\'`` | Signed integer decimal. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'i\'`` | Signed integer decimal. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'o\'`` | Signed octal value. | (1) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'u\'`` | Obsolete type -- it is identical to ``\'d\'``. | (7) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'x\'`` | Signed hexadecimal (lowercase). | (2) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'X\'`` | Signed hexadecimal (uppercase). | (2) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'e\'`` | Floating point exponential format (lowercase). 
| (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'E\'`` | Floating point exponential format (uppercase). | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'f\'`` | Floating point decimal format. | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'F\'`` | Floating point decimal format. | (3) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'g\'`` | Floating point format. Uses lowercase exponential | (4) |\n| | format if exponent is less than -4 or not less than | |\n| | precision, decimal format otherwise. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'G\'`` | Floating point format. Uses uppercase exponential | (4) |\n| | format if exponent is less than -4 or not less than | |\n| | precision, decimal format otherwise. | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'c\'`` | Single character (accepts integer or single character | |\n| | string). | |\n+--------------+-------------------------------------------------------+---------+\n| ``\'r\'`` | String (converts any Python object using *repr()*). | (5) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'s\'`` | String (converts any Python object using ``str()``). | (6) |\n+--------------+-------------------------------------------------------+---------+\n| ``\'%\'`` | No argument is converted, results in a ``\'%\'`` | |\n| | character in the result. | |\n+--------------+-------------------------------------------------------+---------+\n\nNotes:\n\n1. The alternate form causes a leading zero (``\'0\'``) to be inserted\n between left-hand padding and the formatting of the number if the\n leading character of the result is not already a zero.\n\n2. The alternate form causes a leading ``\'0x\'`` or ``\'0X\'`` (depending\n on whether the ``\'x\'`` or ``\'X\'`` format was used) to be inserted\n between left-hand padding and the formatting of the number if the\n leading character of the result is not already a zero.\n\n3. The alternate form causes the result to always contain a decimal\n point, even if no digits follow it.\n\n The precision determines the number of digits after the decimal\n point and defaults to 6.\n\n4. The alternate form causes the result to always contain a decimal\n point, and trailing zeroes are not removed as they would otherwise\n be.\n\n The precision determines the number of significant digits before\n and after the decimal point and defaults to 6.\n\n5. The ``%r`` conversion was added in Python 2.0.\n\n The precision determines the maximal number of characters used.\n\n6. If the object or format provided is a ``unicode`` string, the\n resulting string will also be ``unicode``.\n\n The precision determines the maximal number of characters used.\n\n7. See **PEP 237**.\n\nSince Python strings have an explicit length, ``%s`` conversions do\nnot assume that ``\'\\0\'`` is the end of the string.\n\nChanged in version 2.7: ``%f`` conversions for numbers whose absolute\nvalue is over 1e50 are no longer replaced by ``%g`` conversions.\n\nAdditional string operations are defined in standard modules\n``string`` and ``re``.\n\n\nXRange Type\n===========\n\nThe ``xrange`` type is an immutable sequence which is commonly used\nfor looping. 
The advantage of the ``xrange`` type is that an\n``xrange`` object will always take the same amount of memory, no\nmatter the size of the range it represents. There are no consistent\nperformance advantages.\n\nXRange objects have very little behavior: they only support indexing,\niteration, and the ``len()`` function.\n\n\nMutable Sequence Types\n======================\n\nList and ``bytearray`` objects support additional operations that\nallow in-place modification of the object. Other mutable sequence\ntypes (when added to the language) should also support these\noperations. Strings and tuples are immutable sequence types: such\nobjects cannot be modified once created. The following operations are\ndefined on mutable sequence types (where *x* is an arbitrary object):\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | same as ``s[len(s):len(s)] = | (2) |\n| | [x]`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(x)`` | same as ``s[len(s):len(s)] = x`` | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.count(x)`` | return number of *i*\'s for which | |\n| | ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.index(x[, i[, j]])`` | return smallest *k* such that | (4) |\n| | ``s[k] == x`` and ``i <= k < j`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | same as ``s[i:i] = [x]`` | (5) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | same as ``x = s[i]; del s[i]; | (6) |\n| | return x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | same as ``del s[s.index(x)]`` | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (7) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.sort([cmp[, key[, | sort the items of *s* in place | (7)(8)(9)(10) |\n| reverse]]])`` | | |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. 
The C implementation of Python has historically accepted multiple\n parameters and implicitly joined them into a tuple; this no longer\n works in Python 2.0. Use of this misfeature has been deprecated\n since Python 1.4.\n\n3. *x* can be any iterable object.\n\n4. Raises ``ValueError`` when *x* is not found in *s*. When a negative\n index is passed as the second or third parameter to the ``index()``\n method, the list length is added, as for slice indices. If it is\n still negative, it is truncated to zero, as for slice indices.\n\n Changed in version 2.3: Previously, ``index()`` didn\'t have\n arguments for specifying start and stop positions.\n\n5. When a negative index is passed as the first parameter to the\n ``insert()`` method, the list length is added, as for slice\n indices. If it is still negative, it is truncated to zero, as for\n slice indices.\n\n Changed in version 2.3: Previously, all negative indices were\n truncated to zero.\n\n6. The ``pop()`` method is only supported by the list and array types.\n The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n7. The ``sort()`` and ``reverse()`` methods modify the list in place\n for economy of space when sorting or reversing a large list. To\n remind you that they operate by side effect, they don\'t return the\n sorted or reversed list.\n\n8. The ``sort()`` method takes optional arguments for controlling the\n comparisons.\n\n *cmp* specifies a custom comparison function of two arguments (list\n items) which should return a negative, zero or positive number\n depending on whether the first argument is considered smaller than,\n equal to, or larger than the second argument: ``cmp=lambda x,y:\n cmp(x.lower(), y.lower())``. The default value is ``None``.\n\n *key* specifies a function of one argument that is used to extract\n a comparison key from each list element: ``key=str.lower``. The\n default value is ``None``.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n In general, the *key* and *reverse* conversion processes are much\n faster than specifying an equivalent *cmp* function. This is\n because *cmp* is called multiple times for each list element while\n *key* and *reverse* touch each element only once. Use\n ``functools.cmp_to_key()`` to convert an old-style *cmp* function\n to a *key* function.\n\n Changed in version 2.3: Support for ``None`` as an equivalent to\n omitting *cmp* was added.\n\n Changed in version 2.4: Support for *key* and *reverse* was added.\n\n9. Starting with Python 2.3, the ``sort()`` method is guaranteed to be\n stable. A sort is stable if it guarantees not to change the\n relative order of elements that compare equal --- this is helpful\n for sorting in multiple passes (for example, sort by department,\n then by salary grade).\n\n10. **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python 2.3 and newer makes the\n list appear empty for the duration, and raises ``ValueError`` if\n it can detect that the list has been mutated during a sort.\n', 'typesseq-mutable': "\nMutable Sequence Types\n**********************\n\nList and ``bytearray`` objects support additional operations that\nallow in-place modification of the object. Other mutable sequence\ntypes (when added to the language) should also support these\noperations. 
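[Editor's aside -- a minimal sketch of in-place modification of a ``bytearray``, as allowed by the mutable sequence operations introduced above. The content is an invented example.]

   >>> b = bytearray(b'spam')
   >>> b[0] = ord('S')       # item assignment takes an integer byte value
   >>> b += b'!'             # in-place concatenation
   >>> b
   bytearray(b'Spam!')
   >>> del b[-1]
   >>> b
   bytearray(b'Spam')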
Strings and tuples are immutable sequence types: such\nobjects cannot be modified once created. The following operations are\ndefined on mutable sequence types (where *x* is an arbitrary object):\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\n| | *x* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\n| | replaced by the contents of the | |\n| | iterable *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\n| | replaced by those of *t* | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``del s[i:j:k]`` | removes the elements of | |\n| | ``s[i:j:k]`` from the list | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.append(x)`` | same as ``s[len(s):len(s)] = | (2) |\n| | [x]`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.extend(x)`` | same as ``s[len(s):len(s)] = x`` | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.count(x)`` | return number of *i*'s for which | |\n| | ``s[i] == x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.index(x[, i[, j]])`` | return smallest *k* such that | (4) |\n| | ``s[k] == x`` and ``i <= k < j`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.insert(i, x)`` | same as ``s[i:i] = [x]`` | (5) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.pop([i])`` | same as ``x = s[i]; del s[i]; | (6) |\n| | return x`` | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.remove(x)`` | same as ``del s[s.index(x)]`` | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.reverse()`` | reverses the items of *s* in | (7) |\n| | place | |\n+--------------------------------+----------------------------------+-----------------------+\n| ``s.sort([cmp[, key[, | sort the items of *s* in place | (7)(8)(9)(10) |\n| reverse]]])`` | | |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. The C implementation of Python has historically accepted multiple\n parameters and implicitly joined them into a tuple; this no longer\n works in Python 2.0. Use of this misfeature has been deprecated\n since Python 1.4.\n\n3. *x* can be any iterable object.\n\n4. Raises ``ValueError`` when *x* is not found in *s*. When a negative\n index is passed as the second or third parameter to the ``index()``\n method, the list length is added, as for slice indices. 
If it is\n still negative, it is truncated to zero, as for slice indices.\n\n Changed in version 2.3: Previously, ``index()`` didn't have\n arguments for specifying start and stop positions.\n\n5. When a negative index is passed as the first parameter to the\n ``insert()`` method, the list length is added, as for slice\n indices. If it is still negative, it is truncated to zero, as for\n slice indices.\n\n Changed in version 2.3: Previously, all negative indices were\n truncated to zero.\n\n6. The ``pop()`` method is only supported by the list and array types.\n The optional argument *i* defaults to ``-1``, so that by default\n the last item is removed and returned.\n\n7. The ``sort()`` and ``reverse()`` methods modify the list in place\n for economy of space when sorting or reversing a large list. To\n remind you that they operate by side effect, they don't return the\n sorted or reversed list.\n\n8. The ``sort()`` method takes optional arguments for controlling the\n comparisons.\n\n *cmp* specifies a custom comparison function of two arguments (list\n items) which should return a negative, zero or positive number\n depending on whether the first argument is considered smaller than,\n equal to, or larger than the second argument: ``cmp=lambda x,y:\n cmp(x.lower(), y.lower())``. The default value is ``None``.\n\n *key* specifies a function of one argument that is used to extract\n a comparison key from each list element: ``key=str.lower``. The\n default value is ``None``.\n\n *reverse* is a boolean value. If set to ``True``, then the list\n elements are sorted as if each comparison were reversed.\n\n In general, the *key* and *reverse* conversion processes are much\n faster than specifying an equivalent *cmp* function. This is\n because *cmp* is called multiple times for each list element while\n *key* and *reverse* touch each element only once. Use\n ``functools.cmp_to_key()`` to convert an old-style *cmp* function\n to a *key* function.\n\n Changed in version 2.3: Support for ``None`` as an equivalent to\n omitting *cmp* was added.\n\n Changed in version 2.4: Support for *key* and *reverse* was added.\n\n9. Starting with Python 2.3, the ``sort()`` method is guaranteed to be\n stable. A sort is stable if it guarantees not to change the\n relative order of elements that compare equal --- this is helpful\n for sorting in multiple passes (for example, sort by department,\n then by salary grade).\n\n10. **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python 2.3 and newer makes the\n list appear empty for the duration, and raises ``ValueError`` if\n it can detect that the list has been mutated during a sort.\n", 'unary': '\nUnary arithmetic and bitwise operations\n***************************************\n\nAll unary arithmetic and bitwise operations have the same priority:\n\n u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n\nThe unary ``-`` (minus) operator yields the negation of its numeric\nargument.\n\nThe unary ``+`` (plus) operator yields its numeric argument unchanged.\n\nThe unary ``~`` (invert) operator yields the bitwise inversion of its\nplain or long integer argument. The bitwise inversion of ``x`` is\ndefined as ``-(x+1)``. 
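A minimal sketch, assuming Python 2.7, of the unary-operator identity quoted just above (illustrative only, not part of the patch):

    x = 5
    assert -x == -5 and +x == 5
    assert ~x == -(x + 1) == -6    # bitwise inversion of x is defined as -(x+1)
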
It only applies to integral numbers.\n\nIn all three cases, if the argument does not have the proper type, a\n``TypeError`` exception is raised.\n', 'while': '\nThe ``while`` statement\n***********************\n\nThe ``while`` statement is used for repeated execution as long as an\nexpression is true:\n\n while_stmt ::= "while" expression ":" suite\n ["else" ":" suite]\n\nThis repeatedly tests the expression and, if it is true, executes the\nfirst suite; if the expression is false (which may be the first time\nit is tested) the suite of the ``else`` clause, if present, is\nexecuted and the loop terminates.\n\nA ``break`` statement executed in the first suite terminates the loop\nwithout executing the ``else`` clause\'s suite. A ``continue``\nstatement executed in the first suite skips the rest of the suite and\ngoes back to testing the expression.\n', 'with': '\nThe ``with`` statement\n**********************\n\nNew in version 2.5.\n\nThe ``with`` statement is used to wrap the execution of a block with\nmethods defined by a context manager (see section *With Statement\nContext Managers*). This allows common\n``try``...``except``...``finally`` usage patterns to be encapsulated\nfor convenient reuse.\n\n with_stmt ::= "with" with_item ("," with_item)* ":" suite\n with_item ::= expression ["as" target]\n\nThe execution of the ``with`` statement with one "item" proceeds as\nfollows:\n\n1. The context expression (the expression given in the ``with_item``)\n is evaluated to obtain a context manager.\n\n2. The context manager\'s ``__exit__()`` is loaded for later use.\n\n3. The context manager\'s ``__enter__()`` method is invoked.\n\n4. If a target was included in the ``with`` statement, the return\n value from ``__enter__()`` is assigned to it.\n\n Note: The ``with`` statement guarantees that if the ``__enter__()``\n method returns without an error, then ``__exit__()`` will always\n be called. Thus, if an error occurs during the assignment to the\n target list, it will be treated the same as an error occurring\n within the suite would be. See step 6 below.\n\n5. The suite is executed.\n\n6. The context manager\'s ``__exit__()`` method is invoked. If an\n exception caused the suite to be exited, its type, value, and\n traceback are passed as arguments to ``__exit__()``. Otherwise,\n three ``None`` arguments are supplied.\n\n If the suite was exited due to an exception, and the return value\n from the ``__exit__()`` method was false, the exception is\n reraised. If the return value was true, the exception is\n suppressed, and execution continues with the statement following\n the ``with`` statement.\n\n If the suite was exited for any reason other than an exception, the\n return value from ``__exit__()`` is ignored, and execution proceeds\n at the normal location for the kind of exit that was taken.\n\nWith more than one item, the context managers are processed as if\nmultiple ``with`` statements were nested:\n\n with A() as a, B() as b:\n suite\n\nis equivalent to\n\n with A() as a:\n with B() as b:\n suite\n\nNote: In Python 2.5, the ``with`` statement is only allowed when the\n ``with_statement`` feature has been enabled. 
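A minimal sketch, assuming Python 2.7, of the ``__enter__``/``__exit__`` protocol spelled out in steps 1-6 above. The class name ``Managed`` is invented for the example and this is not part of the patch:

    class Managed(object):
        def __init__(self):
            self.closed = False

        def __enter__(self):
            return self              # bound to the 'as' target (step 4)

        def __exit__(self, exc_type, exc_value, tb):
            self.closed = True       # runs on normal or exceptional exit (step 6)
            return False             # a false return does not suppress exceptions

    with Managed() as m:
        assert not m.closed
    assert m.closed
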
It is always enabled\n in Python 2.6.\n\nChanged in version 2.7: Support for multiple context expressions.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n ``with`` statement.\n', - 'yield': '\nThe ``yield`` statement\n***********************\n\n yield_stmt ::= yield_expression\n\nThe ``yield`` statement is only used when defining a generator\nfunction, and is only used in the body of the generator function.\nUsing a ``yield`` statement in a function definition is sufficient to\ncause that definition to create a generator function instead of a\nnormal function.\n\nWhen a generator function is called, it returns an iterator known as a\ngenerator iterator, or more commonly, a generator. The body of the\ngenerator function is executed by calling the generator\'s ``next()``\nmethod repeatedly until it raises an exception.\n\nWhen a ``yield`` statement is executed, the state of the generator is\nfrozen and the value of ``expression_list`` is returned to\n``next()``\'s caller. By "frozen" we mean that all local state is\nretained, including the current bindings of local variables, the\ninstruction pointer, and the internal evaluation stack: enough\ninformation is saved so that the next time ``next()`` is invoked, the\nfunction can proceed exactly as if the ``yield`` statement were just\nanother external call.\n\nAs of Python version 2.5, the ``yield`` statement is now allowed in\nthe ``try`` clause of a ``try`` ... ``finally`` construct. If the\ngenerator is not resumed before it is finalized (by reaching a zero\nreference count or by being garbage collected), the generator-\niterator\'s ``close()`` method will be called, allowing any pending\n``finally`` clauses to execute.\n\nNote: In Python 2.2, the ``yield`` statement was only allowed when the\n ``generators`` feature has been enabled. This ``__future__`` import\n statement was used to enable the feature:\n\n from __future__ import generators\n\nSee also:\n\n **PEP 0255** - Simple Generators\n The proposal for adding generators and the ``yield`` statement\n to Python.\n\n **PEP 0342** - Coroutines via Enhanced Generators\n The proposal that, among other generator enhancements, proposed\n allowing ``yield`` to appear inside a ``try`` ... ``finally``\n block.\n'} + 'yield': '\nThe ``yield`` statement\n***********************\n\n yield_stmt ::= yield_expression\n\nThe ``yield`` statement is only used when defining a generator\nfunction, and is only used in the body of the generator function.\nUsing a ``yield`` statement in a function definition is sufficient to\ncause that definition to create a generator function instead of a\nnormal function.\n\nWhen a generator function is called, it returns an iterator known as a\ngenerator iterator, or more commonly, a generator. The body of the\ngenerator function is executed by calling the generator\'s ``next()``\nmethod repeatedly until it raises an exception.\n\nWhen a ``yield`` statement is executed, the state of the generator is\nfrozen and the value of ``expression_list`` is returned to\n``next()``\'s caller. 
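A minimal sketch, assuming Python 2.7, of the generator behaviour described above; the quoted paragraph resumes after the sketch. The function ``count_up_to`` is invented for the example and this is not part of the patch:

    def count_up_to(n):              # calling this returns a generator iterator
        i = 0
        while i < n:
            yield i                  # state is frozen here; i goes to next()'s caller
            i += 1

    g = count_up_to(2)
    assert g.next() == 0             # Python 2 spelling: the next() method
    assert g.next() == 1
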
By "frozen" we mean that all local state is\nretained, including the current bindings of local variables, the\ninstruction pointer, and the internal evaluation stack: enough\ninformation is saved so that the next time ``next()`` is invoked, the\nfunction can proceed exactly as if the ``yield`` statement were just\nanother external call.\n\nAs of Python version 2.5, the ``yield`` statement is now allowed in\nthe ``try`` clause of a ``try`` ... ``finally`` construct. If the\ngenerator is not resumed before it is finalized (by reaching a zero\nreference count or by being garbage collected), the generator-\niterator\'s ``close()`` method will be called, allowing any pending\n``finally`` clauses to execute.\n\nFor full details of ``yield`` semantics, refer to the *Yield\nexpressions* section.\n\nNote: In Python 2.2, the ``yield`` statement was only allowed when the\n ``generators`` feature has been enabled. This ``__future__`` import\n statement was used to enable the feature:\n\n from __future__ import generators\n\nSee also:\n\n **PEP 0255** - Simple Generators\n The proposal for adding generators and the ``yield`` statement\n to Python.\n\n **PEP 0342** - Coroutines via Enhanced Generators\n The proposal that, among other generator enhancements, proposed\n allowing ``yield`` to appear inside a ``try`` ... ``finally``\n block.\n'} diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py --- a/Lib/sre_parse.py +++ b/Lib/sre_parse.py @@ -549,7 +549,8 @@ if not name: raise error("missing group name") if not isname(name): - raise error, "bad character in group name" + raise error("bad character in group name %r" % + name) elif sourcematch("="): # named backreference name = "" @@ -563,7 +564,8 @@ if not name: raise error("missing group name") if not isname(name): - raise error, "bad character in group name" + raise error("bad character in backref group name " + "%r" % name) gid = state.groupdict.get(name) if gid is None: raise error, "unknown group name" diff --git a/Lib/ssl.py b/Lib/ssl.py --- a/Lib/ssl.py +++ b/Lib/ssl.py @@ -344,17 +344,21 @@ SSL channel, and the address of the remote client.""" newsock, addr = socket.accept(self) - return (SSLSocket(newsock, - keyfile=self.keyfile, - certfile=self.certfile, - server_side=True, - cert_reqs=self.cert_reqs, - ssl_version=self.ssl_version, - ca_certs=self.ca_certs, - ciphers=self.ciphers, - do_handshake_on_connect=self.do_handshake_on_connect, - suppress_ragged_eofs=self.suppress_ragged_eofs), - addr) + try: + return (SSLSocket(newsock, + keyfile=self.keyfile, + certfile=self.certfile, + server_side=True, + cert_reqs=self.cert_reqs, + ssl_version=self.ssl_version, + ca_certs=self.ca_certs, + ciphers=self.ciphers, + do_handshake_on_connect=self.do_handshake_on_connect, + suppress_ragged_eofs=self.suppress_ragged_eofs), + addr) + except socket_error as e: + newsock.close() + raise e def makefile(self, mode='r', bufsize=-1): diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -2462,16 +2462,18 @@ # Fix for SF #1100429: Under rare circumstances it can # happen that getmembers() is called during iteration, # which will cause TarIter to stop prematurely. 
- if not self.tarfile._loaded: + + if self.index == 0 and self.tarfile.firstmember is not None: + tarinfo = self.tarfile.next() + elif self.index < len(self.tarfile.members): + tarinfo = self.tarfile.members[self.index] + elif not self.tarfile._loaded: tarinfo = self.tarfile.next() if not tarinfo: self.tarfile._loaded = True raise StopIteration else: - try: - tarinfo = self.tarfile.members[self.index] - except IndexError: - raise StopIteration + raise StopIteration self.index += 1 return tarinfo diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -538,6 +538,8 @@ "'abc\"", # open quote and close quote don't match "'abc' ?", # junk after close quote "'\\'", # trailing backslash + "'", # issue #17710 + "' ", # issue #17710 # some tests of the quoting rules #"'abc\"\''", #"'\\\\a\'\'\'\\\'\\\\\''", diff --git a/Lib/test/test_base64.py b/Lib/test/test_base64.py --- a/Lib/test/test_base64.py +++ b/Lib/test/test_base64.py @@ -18,6 +18,8 @@ "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n") + # Non-bytes + eq(base64.encodestring(bytearray('abc')), 'YWJj\n') def test_decodestring(self): eq = self.assertEqual @@ -32,6 +34,8 @@ "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}") eq(base64.decodestring(''), '') + # Non-bytes + eq(base64.decodestring(bytearray("YWJj\n")), "abc") def test_encode(self): eq = self.assertEqual @@ -73,6 +77,10 @@ "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Test with arbitrary alternative characters eq(base64.b64encode('\xd3V\xbeo\xf7\x1d', altchars='*$'), '01a*b$cd') + # Non-bytes + eq(base64.b64encode(bytearray('abcd')), 'YWJjZA==') + self.assertRaises(TypeError, base64.b64encode, + '\xd3V\xbeo\xf7\x1d', altchars=bytearray('*$')) # Test standard alphabet eq(base64.standard_b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") eq(base64.standard_b64encode("a"), "YQ==") @@ -85,8 +93,12 @@ "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") + # Non-bytes + eq(base64.standard_b64encode(bytearray('abcd')), 'YWJjZA==') # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64encode('\xd3V\xbeo\xf7\x1d'), '01a-b_cd') + # Non-bytes + eq(base64.urlsafe_b64encode(bytearray('\xd3V\xbeo\xf7\x1d')), '01a-b_cd') def test_b64decode(self): eq = self.assertEqual @@ -104,6 +116,8 @@ eq(base64.b64decode(''), '') # Test with arbitrary alternative characters eq(base64.b64decode('01a*b$cd', altchars='*$'), '\xd3V\xbeo\xf7\x1d') + # Non-bytes + eq(base64.b64decode(bytearray("YWJj")), "abc") # Test standard alphabet eq(base64.standard_b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") eq(base64.standard_b64decode("YQ=="), "a") @@ -116,8 +130,12 @@ "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. 
[]{}") + # Non-bytes + eq(base64.standard_b64decode(bytearray("YWJj")), "abc") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d') + # Non-bytes + eq(base64.urlsafe_b64decode(bytearray('01a-b_cd')), '\xd3V\xbeo\xf7\x1d') def test_b64decode_error(self): self.assertRaises(TypeError, base64.b64decode, 'abc') @@ -131,6 +149,8 @@ eq(base64.b32encode('abc'), 'MFRGG===') eq(base64.b32encode('abcd'), 'MFRGGZA=') eq(base64.b32encode('abcde'), 'MFRGGZDF') + # Non-bytes + eq(base64.b32encode(bytearray('abcd')), 'MFRGGZA=') def test_b32decode(self): eq = self.assertEqual @@ -141,6 +161,8 @@ eq(base64.b32decode('MFRGG==='), 'abc') eq(base64.b32decode('MFRGGZA='), 'abcd') eq(base64.b32decode('MFRGGZDF'), 'abcde') + # Non-bytes + self.assertRaises(TypeError, base64.b32decode, bytearray('MFRGG===')) def test_b32decode_casefold(self): eq = self.assertEqual @@ -171,6 +193,8 @@ eq = self.assertEqual eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF') eq(base64.b16encode('\x00'), '00') + # Non-bytes + eq(base64.b16encode(bytearray('\x01\x02\xab\xcd\xef')), '0102ABCDEF') def test_b16decode(self): eq = self.assertEqual @@ -180,6 +204,8 @@ self.assertRaises(TypeError, base64.b16decode, '0102abcdef') # Case fold eq(base64.b16decode('0102abcdef', True), '\x01\x02\xab\xcd\xef') + # Non-bytes + eq(base64.b16decode(bytearray("0102ABCDEF")), '\x01\x02\xab\xcd\xef') diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -25,9 +25,6 @@ DATA_CRLF = 'BZh91AY&SY\xaez\xbbN\x00\x01H\xdf\x80\x00\x12@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe0@\x01\xbc\xc6`\x86*\x8d=M\xa9\x9a\x86\xd0L@\x0fI\xa6!\xa1\x13\xc8\x88jdi\x8d@\x03@\x1a\x1a\x0c\x0c\x83 \x00\xc4h2\x19\x01\x82D\x84e\t\xe8\x99\x89\x19\x1ah\x00\r\x1a\x11\xaf\x9b\x0fG\xf5(\x1b\x1f?\t\x12\xcf\xb5\xfc\x95E\x00ps\x89\x12^\xa4\xdd\xa2&\x05(\x87\x04\x98\x89u\xe40%\xb6\x19\'\x8c\xc4\x89\xca\x07\x0e\x1b!\x91UIFU%C\x994!DI\xd2\xfa\xf0\xf1N8W\xde\x13A\xf5\x9cr%?\x9f3;I45A\xd1\x8bT\xb1\xa4\xc7\x8d\x1a\\"\xad\xa1\xabyBg\x15\xb9l\x88\x88\x91k"\x94\xa4\xd4\x89\xae*\xa6\x0b\x10\x0c\xd6\xd4m\xe86\xec\xb5j\x8a\x86j\';\xca.\x01I\xf2\xaaJ\xe8\x88\x8cU+t3\xfb\x0c\n\xa33\x13r2\r\x16\xe0\xb3(\xbf\x1d\x83r\xe7M\xf0D\x1365\xd8\x88\xd3\xa4\x92\xcb2\x06\x04\\\xc1\xb0\xea//\xbek&\xd8\xe6+t\xe5\xa1\x13\xada\x16\xder5"w]\xa2i\xb7[\x97R \xe2IT\xcd;Z\x04dk4\xad\x8a\t\xd3\x81z\x10\xf1:^`\xab\x1f\xc5\xdc\x91N\x14$+\x9e\xae\xd3\x80' EMPTY_DATA = 'BZh9\x17rE8P\x90\x00\x00\x00\x00' - with open(findfile("testbz2_bigmem.bz2"), "rb") as f: - DATA_BIGMEM = f.read() - if has_cmdline_bunzip2: def decompress(self, data): pop = subprocess.Popen("bunzip2", shell=True, @@ -328,24 +325,6 @@ self.assertRaises(ValueError, f.readline) self.assertRaises(ValueError, f.readlines) - def test_read_truncated(self): - # Drop the eos_magic field (6 bytes) and CRC (4 bytes). - truncated = self.DATA[:-10] - with open(self.filename, 'wb') as f: - f.write(truncated) - with BZ2File(self.filename) as f: - self.assertRaises(EOFError, f.read) - with BZ2File(self.filename) as f: - self.assertEqual(f.read(len(self.TEXT)), self.TEXT) - self.assertRaises(EOFError, f.read, 1) - # Incomplete 4-byte file header, and block header of at least 146 bits. 
- for i in range(22): - with open(self.filename, 'wb') as f: - f.write(truncated[:i]) - with BZ2File(self.filename) as f: - self.assertRaises(EOFError, f.read, 1) - - class BZ2CompressorTest(BaseTest): def testCompress(self): # "Test BZ2Compressor.compress()/flush()" @@ -431,9 +410,10 @@ # Issue #14398: decompression fails when output data is >=2GB. if size < _4G: self.skipTest("Test needs 5GB of memory to run.") - text = bz2.BZ2Decompressor().decompress(self.DATA_BIGMEM) + compressed = bz2.compress("a" * _4G) + text = bz2.BZ2Decompressor().decompress(compressed) self.assertEqual(len(text), _4G) - self.assertEqual(text.strip("\0"), "") + self.assertEqual(text.strip("a"), "") class FuncTest(BaseTest): @@ -482,9 +462,10 @@ # Issue #14398: decompression fails when output data is >=2GB. if size < _4G: self.skipTest("Test needs 5GB of memory to run.") - text = bz2.decompress(self.DATA_BIGMEM) + compressed = bz2.compress("a" * _4G) + text = bz2.decompress(compressed) self.assertEqual(len(text), _4G) - self.assertEqual(text.strip("\0"), "") + self.assertEqual(text.strip("a"), "") def test_main(): test_support.run_unittest( diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -78,12 +78,12 @@ self.assertRaises(TypeError, eval, 'Point(XXX=1, y=2)', locals()) # wrong keyword argument self.assertRaises(TypeError, eval, 'Point(x=1)', locals()) # missing keyword argument self.assertEqual(repr(p), 'Point(x=11, y=22)') + self.assertNotIn('__dict__', dir(p)) # verify instance has no dict self.assertNotIn('__weakref__', dir(p)) self.assertEqual(p, Point._make([11, 22])) # test _make classmethod self.assertEqual(p._fields, ('x', 'y')) # test _fields attribute self.assertEqual(p._replace(x=1), (1, 22)) # test _replace method self.assertEqual(p._asdict(), dict(x=11, y=22)) # test _asdict method - self.assertEqual(vars(p), p._asdict()) # verify that vars() works try: p._replace(x=1, error=2) diff --git a/Lib/test/test_dictviews.py b/Lib/test/test_dictviews.py --- a/Lib/test/test_dictviews.py +++ b/Lib/test/test_dictviews.py @@ -144,6 +144,11 @@ self.assertEqual(d1.viewitems() ^ d3.viewitems(), {('a', 1), ('b', 2), ('d', 4), ('e', 5)}) + def test_recursive_repr(self): + d = {} + d[42] = d.viewvalues() + self.assertRaises(RuntimeError, repr, d) + diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py --- a/Lib/test/test_gdb.py +++ b/Lib/test/test_gdb.py @@ -142,30 +142,32 @@ # Use "args" to invoke gdb, capturing stdout, stderr: out, err = run_gdb(*args, PYTHONHASHSEED='0') - # Ignore some noise on stderr due to the pending breakpoint: - err = err.replace('Function "%s" not defined.\n' % breakpoint, '') - # Ignore some other noise on stderr (http://bugs.python.org/issue8600) - err = err.replace("warning: Unable to find libthread_db matching" - " inferior's thread library, thread debugging will" - " not be available.\n", - '') - err = err.replace("warning: Cannot initialize thread debugging" - " library: Debugger service failed\n", - '') - err = err.replace('warning: Could not load shared library symbols for ' - 'linux-vdso.so.1.\n' - 'Do you need "set solib-search-path" or ' - '"set sysroot"?\n', - '') - err = err.replace('warning: Could not load shared library symbols for ' - 'linux-gate.so.1.\n' - 'Do you need "set solib-search-path" or ' - '"set sysroot"?\n', - '') + errlines = err.splitlines() + unexpected_errlines = [] + + # Ignore some benign messages on stderr. + ignore_patterns = ( + 'Function "%s" not defined.' 
% breakpoint, + "warning: no loadable sections found in added symbol-file" + " system-supplied DSO", + "warning: Unable to find libthread_db matching" + " inferior's thread library, thread debugging will" + " not be available.", + "warning: Cannot initialize thread debugging" + " library: Debugger service failed", + 'warning: Could not load shared library symbols for ' + 'linux-vdso.so', + 'warning: Could not load shared library symbols for ' + 'linux-gate.so', + 'Do you need "set solib-search-path" or ' + '"set sysroot"?', + ) + for line in errlines: + if not line.startswith(ignore_patterns): + unexpected_errlines.append(line) # Ensure no unexpected error messages: - self.assertEqual(err, '') - + self.assertEqual(unexpected_errlines, []) return out def get_gdb_repr(self, source, diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py --- a/Lib/test/test_gzip.py +++ b/Lib/test/test_gzip.py @@ -289,23 +289,6 @@ with gzip.GzipFile(fileobj=f, mode="w") as g: self.assertEqual(g.name, "") - def test_read_truncated(self): - data = data1*50 - buf = io.BytesIO() - with gzip.GzipFile(fileobj=buf, mode="w") as f: - f.write(data) - # Drop the CRC (4 bytes) and file size (4 bytes). - truncated = buf.getvalue()[:-8] - with gzip.GzipFile(fileobj=io.BytesIO(truncated)) as f: - self.assertRaises(EOFError, f.read) - with gzip.GzipFile(fileobj=io.BytesIO(truncated)) as f: - self.assertEqual(f.read(len(data)), data) - self.assertRaises(EOFError, f.read, 1) - # Incomplete 10-byte header. - for i in range(2, 10): - with gzip.GzipFile(fileobj=io.BytesIO(truncated[:i])) as f: - self.assertRaises(EOFError, f.read, 1) - def test_read_with_extra(self): # Gzip data with an extra field gzdata = (b'\x1f\x8b\x08\x04\xb2\x17cQ\x02\xff' diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -2880,7 +2880,7 @@ # The buffered IO layer must check for pending signal # handlers, which in this case will invoke alarm_interrupt(). self.assertRaises(ZeroDivisionError, - wio.write, item * (3 * 1000 * 1000)) + wio.write, item * (support.PIPE_MAX_SIZE // len(item) + 1)) t.join() # We got one byte, get another one and check that it isn't a # repeat of the first one. @@ -2978,7 +2978,7 @@ select = support.import_module("select") # A quantity that exceeds the buffer size of an anonymous pipe's # write end. - N = 1024 * 1024 + N = support.PIPE_MAX_SIZE r, w = os.pipe() fdopen_kwargs["closefd"] = False # We need a separate thread to read from the pipe and allow the diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -21,6 +21,8 @@ eq(self.db.guess_type("foo.tgz"), ("application/x-tar", "gzip")) eq(self.db.guess_type("foo.tar.gz"), ("application/x-tar", "gzip")) eq(self.db.guess_type("foo.tar.Z"), ("application/x-tar", "compress")) + eq(self.db.guess_type("foo.tar.bz2"), ("application/x-tar", "bzip2")) + eq(self.db.guess_type("foo.tar.xz"), ("application/x-tar", "xz")) def test_data_urls(self): eq = self.assertEqual diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -2430,13 +2430,43 @@ [sys.executable, '-E', '-B', '-O', '-c', prog]) child_flags, grandchild_flags = json.loads(data.decode('ascii')) self.assertEqual(child_flags, grandchild_flags) + +# +# Issue #17555: ForkAwareThreadLock +# + +class TestForkAwareThreadLock(unittest.TestCase): + # We recurisvely start processes. 
Issue #17555 meant that the + # after fork registry would get duplicate entries for the same + # lock. The size of the registry at generation n was ~2**n. + + @classmethod + def child(cls, n, conn): + if n > 1: + p = multiprocessing.Process(target=cls.child, args=(n-1, conn)) + p.start() + p.join() + else: + conn.send(len(util._afterfork_registry)) + conn.close() + + def test_lock(self): + r, w = multiprocessing.Pipe(False) + l = util.ForkAwareThreadLock() + old_size = len(util._afterfork_registry) + p = multiprocessing.Process(target=self.child, args=(5, w)) + p.start() + new_size = r.recv() + p.join() + self.assertLessEqual(new_size, old_size) + # # # testcases_other = [OtherTest, TestInvalidHandle, TestInitializers, TestStdinBadfiledescriptor, TestTimeouts, TestNoForkBomb, - TestFlags] + TestFlags, TestForkAwareThreadLock] # # diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py --- a/Lib/test/test_plistlib.py +++ b/Lib/test/test_plistlib.py @@ -135,6 +135,18 @@ data2 = plistlib.writePlistToString(pl2) self.assertEqual(data, data2) + def test_indentation_array(self): + data = [[[[[[[[{'test': plistlib.Data(b'aaaaaa')}]]]]]]]] + self.assertEqual(plistlib.readPlistFromString(plistlib.writePlistToString(data)), data) + + def test_indentation_dict(self): + data = {'1': {'2': {'3': {'4': {'5': {'6': {'7': {'8': {'9': plistlib.Data(b'aaaaaa')}}}}}}}}} + self.assertEqual(plistlib.readPlistFromString(plistlib.writePlistToString(data)), data) + + def test_indentation_dict_mix(self): + data = {'1': {'2': [{'3': [[[[[{'test': plistlib.Data(b'aaaaaa')}]]]]]}]}} + self.assertEqual(plistlib.readPlistFromString(plistlib.writePlistToString(data)), data) + def test_appleformatting(self): pl = plistlib.readPlistFromString(TESTDATA) data = plistlib.writePlistToString(pl) diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -4,15 +4,17 @@ import __builtin__ import re import pydoc +import contextlib import inspect import keyword +import pkgutil import unittest import xml.etree import test.test_support from collections import namedtuple from test.script_helper import assert_python_ok from test.test_support import ( - TESTFN, rmtree, reap_children, captured_stdout) + TESTFN, rmtree, reap_children, captured_stdout, captured_stderr) from test import pydoc_mod @@ -228,7 +230,30 @@ print '\n' + ''.join(diffs) -class PyDocDocTest(unittest.TestCase): +class PydocBaseTest(unittest.TestCase): + + def _restricted_walk_packages(self, walk_packages, path=None): + """ + A version of pkgutil.walk_packages() that will restrict itself to + a given path. 
+ """ + default_path = path or [os.path.dirname(__file__)] + def wrapper(path=None, prefix='', onerror=None): + return walk_packages(path or default_path, prefix, onerror) + return wrapper + + @contextlib.contextmanager + def restrict_walk_packages(self, path=None): + walk_packages = pkgutil.walk_packages + pkgutil.walk_packages = self._restricted_walk_packages(walk_packages, + path) + try: + yield + finally: + pkgutil.walk_packages = walk_packages + + +class PydocDocTest(unittest.TestCase): @unittest.skipIf(sys.flags.optimize >= 2, "Docstrings are omitted with -O2 and above") @@ -303,7 +328,7 @@ "") -class PydocImportTest(unittest.TestCase): +class PydocImportTest(PydocBaseTest): def setUp(self): self.test_dir = os.mkdir(TESTFN) @@ -338,8 +363,19 @@ badsyntax = os.path.join(pkgdir, "__init__") + os.extsep + "py" with open(badsyntax, 'w') as f: f.write("invalid python syntax = $1\n") - result = run_pydoc('zqwykjv', '-k', PYTHONPATH=TESTFN) - self.assertEqual('', result) + with self.restrict_walk_packages(path=[TESTFN]): + with captured_stdout() as out: + with captured_stderr() as err: + pydoc.apropos('xyzzy') + # No result, no error + self.assertEqual(out.getvalue(), '') + self.assertEqual(err.getvalue(), '') + # The package name is still matched + with captured_stdout() as out: + with captured_stderr() as err: + pydoc.apropos('syntaxerr') + self.assertEqual(out.getvalue().strip(), 'syntaxerr') + self.assertEqual(err.getvalue(), '') def test_apropos_with_unreadable_dir(self): # Issue 7367 - pydoc -k failed when unreadable dir on path @@ -348,8 +384,13 @@ self.addCleanup(os.rmdir, self.unreadable_dir) # Note, on Windows the directory appears to be still # readable so this is not really testing the issue there - result = run_pydoc('zqwykjv', '-k', PYTHONPATH=TESTFN) - self.assertEqual('', result) + with self.restrict_walk_packages(path=[TESTFN]): + with captured_stdout() as out: + with captured_stderr() as err: + pydoc.apropos('SOMEKEY') + # No result, no error + self.assertEqual(out.getvalue(), '') + self.assertEqual(err.getvalue(), '') class TestDescriptions(unittest.TestCase): @@ -412,7 +453,7 @@ def test_main(): try: - test.test_support.run_unittest(PyDocDocTest, + test.test_support.run_unittest(PydocDocTest, PydocImportTest, TestDescriptions, TestHelper) diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -2,6 +2,7 @@ from test.test_support import precisionbigmemtest, _2G, cpython_only import re from re import Scanner +import sre_constants import sys import string import traceback @@ -886,6 +887,16 @@ self.assertRaises(OverflowError, re.compile, r".{,%d}" % MAXREPEAT) self.assertRaises(OverflowError, re.compile, r".{%d,}?" 
% MAXREPEAT) + def test_backref_group_name_in_exception(self): + # Issue 17341: Poor error message when compiling invalid regex + with self.assertRaisesRegexp(sre_constants.error, ''): + re.compile('(?P=)') + + def test_group_name_in_exception(self): + # Issue 17341: Poor error message when compiling invalid regex + with self.assertRaisesRegexp(sre_constants.error, '\?foo'): + re.compile('(?P)') + def run_re_tests(): from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py --- a/Lib/test/test_sax.py +++ b/Lib/test/test_sax.py @@ -284,6 +284,26 @@ self.assertEqual(result.getvalue(), start + " ") + def test_xmlgen_encoding_bytes(self): + encodings = ('iso-8859-15', 'utf-8', + 'utf-16be', 'utf-16le', + 'utf-32be', 'utf-32le') + for encoding in encodings: + result = self.ioclass() + gen = XMLGenerator(result, encoding=encoding) + + gen.startDocument() + gen.startElement("doc", {"a": u'\u20ac'}) + gen.characters(u"\u20ac".encode(encoding)) + gen.ignorableWhitespace(" ".encode(encoding)) + gen.endElement("doc") + gen.endDocument() + + self.assertEqual(result.getvalue(), ( + u'\n' + u'\u20ac ' % encoding + ).encode(encoding, 'xmlcharrefreplace')) + def test_xmlgen_ns(self): result = self.ioclass() gen = XMLGenerator(result) diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -400,6 +400,15 @@ return (len(x) > len(y)) - (len(x) < len(y)) return (x > y) - (x < y) + +# A constant likely larger than the underlying OS pipe buffer size, to +# make writes blocking. +# Windows limit seems to be around 512 B, and many Unix kernels have a +# 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure. +# (see issue #17835 for a discussion of this number). +PIPE_MAX_SIZE = 4 *1024 * 1024 + 1 + + try: unicode have_unicode = True diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -345,6 +345,14 @@ finally: os.remove(empty) + def test_parallel_iteration(self): + # Issue #16601: Restarting iteration over tarfile continued + # from where it left off. + with tarfile.open(self.tarname) as tar: + for m1, m2 in zip(tar, tar): + self.assertEqual(m1.offset, m2.offset) + self.assertEqual(m1.name, m2.name) + class StreamReadTest(CommonReadTest): diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -4,6 +4,7 @@ import sys import os from test import test_support +from subprocess import Popen, PIPE # Skip this test if the _tkinter module wasn't built. 
_tkinter = test_support.import_module('_tkinter') @@ -146,11 +147,20 @@ with test_support.EnvironmentVarGuard() as env: env.unset("TCL_LIBRARY") - f = os.popen('%s -c "import Tkinter; print Tkinter"' % (unc_name,)) + cmd = '%s -c "import Tkinter; print Tkinter"' % (unc_name,) - self.assertTrue('Tkinter.py' in f.read()) - # exit code must be zero - self.assertEqual(f.close(), None) + p = Popen(cmd, stdout=PIPE, stderr=PIPE) + out_data, err_data = p.communicate() + + msg = '\n\n'.join(['"Tkinter.py" not in output', + 'Command:', cmd, + 'stdout:', out_data, + 'stderr:', err_data]) + + self.assertIn('Tkinter.py', out_data, msg) + + self.assertEqual(p.wait(), 0, 'Non-zero exit code') + def test_passing_values(self): def passValue(value): diff --git a/Lib/test/test_weakset.py b/Lib/test/test_weakset.py --- a/Lib/test/test_weakset.py +++ b/Lib/test/test_weakset.py @@ -351,6 +351,12 @@ self.assertFalse(self.s == tuple(self.items)) self.assertFalse(self.s == 1) + def test_ne(self): + self.assertTrue(self.s != set(self.items)) + s1 = WeakSet() + s2 = WeakSet() + self.assertFalse(s1 != s2) + def test_weak_destroy_while_iterating(self): # Issue #7105: iterators shouldn't crash when a key is implicitly removed # Create new items to be sure no-one else holds a reference diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py --- a/Lib/test/test_winreg.py +++ b/Lib/test/test_winreg.py @@ -28,9 +28,12 @@ # tests are only valid up until 6.1 HAS_REFLECTION = True if WIN_VER < (6, 1) else False -test_key_name = "SOFTWARE\\Python Registry Test Key - Delete Me" +# Use a per-process key to prevent concurrent test runs (buildbot!) from +# stomping on each other. +test_key_base = "Python Test Key [%d] - Delete Me" % (os.getpid(),) +test_key_name = "SOFTWARE\\" + test_key_base # On OS'es that support reflection we should test with a reflected key -test_reflect_key_name = "SOFTWARE\\Classes\\Python Test Key - Delete Me" +test_reflect_key_name = "SOFTWARE\\Classes\\" + test_key_base test_data = [ ("Int Value", 45, REG_DWORD), @@ -439,6 +442,11 @@ DeleteKeyEx(HKEY_CURRENT_USER, test_reflect_key_name, KEY_WOW64_32KEY, 0) + def test_exception_numbers(self): + with self.assertRaises(WindowsError) as ctx: + QueryValue(HKEY_CLASSES_ROOT, 'some_value_that_does_not_exist') + + self.assertEqual(ctx.exception.errno, 2) def test_main(): test_support.run_unittest(LocalWinregTests, RemoteWinregTests, diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py --- a/Lib/test/test_zipfile.py +++ b/Lib/test/test_zipfile.py @@ -18,7 +18,14 @@ from random import randint, random from unittest import skipUnless -from test.test_support import TESTFN, TESTFN_UNICODE, run_unittest, findfile, unlink +from test.test_support import TESTFN, TESTFN_UNICODE, TESTFN_ENCODING, \ + run_unittest, findfile, unlink +try: + TESTFN_UNICODE.encode(TESTFN_ENCODING) +except (UnicodeError, TypeError): + # Either the file system encoding is None, or the file name + # cannot be encoded in the file system encoding. 
+ TESTFN_UNICODE = None TESTFN2 = TESTFN + "2" TESTFNDIR = TESTFN + "d" @@ -424,6 +431,7 @@ with open(filename, 'rb') as f: self.assertEqual(f.read(), content) + @skipUnless(TESTFN_UNICODE, "No Unicode filesystem semantics on this platform.") def test_extract_unicode_filenames(self): fnames = [u'foo.txt', os.path.basename(TESTFN_UNICODE)] content = 'Test for unicode filename' diff --git a/Lib/test/testbz2_bigmem.bz2 b/Lib/test/testbz2_bigmem.bz2 deleted file mode 100644 Binary file Lib/test/testbz2_bigmem.bz2 has changed diff --git a/Lib/threading.py b/Lib/threading.py --- a/Lib/threading.py +++ b/Lib/threading.py @@ -457,21 +457,20 @@ """ rc = False - self.__cond.acquire() - while self.__value == 0: - if not blocking: - break - if __debug__: - self._note("%s.acquire(%s): blocked waiting, value=%s", - self, blocking, self.__value) - self.__cond.wait() - else: - self.__value = self.__value - 1 - if __debug__: - self._note("%s.acquire: success, value=%s", - self, self.__value) - rc = True - self.__cond.release() + with self.__cond: + while self.__value == 0: + if not blocking: + break + if __debug__: + self._note("%s.acquire(%s): blocked waiting, value=%s", + self, blocking, self.__value) + self.__cond.wait() + else: + self.__value = self.__value - 1 + if __debug__: + self._note("%s.acquire: success, value=%s", + self, self.__value) + rc = True return rc __enter__ = acquire @@ -483,13 +482,12 @@ to become larger than zero again, wake up that thread. """ - self.__cond.acquire() - self.__value = self.__value + 1 - if __debug__: - self._note("%s.release: success, value=%s", - self, self.__value) - self.__cond.notify() - self.__cond.release() + with self.__cond: + self.__value = self.__value + 1 + if __debug__: + self._note("%s.release: success, value=%s", + self, self.__value) + self.__cond.notify() def __exit__(self, t, v, tb): self.release() diff --git a/Lib/xml/sax/saxutils.py b/Lib/xml/sax/saxutils.py --- a/Lib/xml/sax/saxutils.py +++ b/Lib/xml/sax/saxutils.py @@ -180,10 +180,14 @@ self._write(u'' % self._qname(name)) def characters(self, content): - self._write(escape(unicode(content))) + if not isinstance(content, unicode): + content = unicode(content, self._encoding) + self._write(escape(content)) def ignorableWhitespace(self, content): - self._write(unicode(content)) + if not isinstance(content, unicode): + content = unicode(content, self._encoding) + self._write(content) def processingInstruction(self, target, data): self._write(u'' % (target, data)) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -96,6 +96,7 @@ David Binger Dominic Binks Philippe Biondi +Michael Birtwell Stuart Bishop Roy Bixler Jonathan Black @@ -626,6 +627,7 @@ Jim Lynch Mikael Lyngvig Martin von L?wis +Till Maas Jeff MacDonald John Machin Andrew I MacIntyre @@ -656,6 +658,7 @@ Chris McDonough Greg McFarlane Alan McIntyre +Jessica McKellar Michael McLay Mark Mc Mahon Gordon McMillan @@ -674,6 +677,7 @@ Mike Meyer Piotr Meyer Steven Miale +Jason Michalski Trent Mick Tom Middleton Stan Mihai @@ -809,6 +813,7 @@ Eduardo P?rez Brian Quinlan Anders Qvist +Thomas Rachel Burton Radons Jeff Ramnani Brodie Rao @@ -837,6 +842,7 @@ Jean-Claude Rimbault Vlad Riscutia Wes Rishel +Dan Riti Juan M. 
Bello Rivas Davide Rizzo Anthony Roach @@ -935,6 +941,7 @@ Ionel Simionescu Kirill Simonov Nathan Paul Simons +Guilherme Sim?es Ravi Sinha Janne Sinkkonen Ng Pheng Siong @@ -1039,6 +1046,7 @@ Kyle VanderBeek Atul Varma Dmitry Vasiliev +Sebastian Ortiz Vasquez Alexandre Vassalotti Frank Vercruesse Mike Verdone @@ -1046,6 +1054,7 @@ Al Vezza Jacques A. Vidrine John Viega +Dino Viehland Kannan Vijayan Kurt Vile Norman Vine diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -1,22 +1,65 @@ Python News +++++++++++ +What's New in Python 2.7.6? +=========================== + +*Release date: XXXX-XX-XX* + +Core and Builtins +----------------- + +- Issue #18019: Fix crash in the repr of dictionaries containing their own + views. + +Library +------- + +- Implement inequality on weakref.WeakSet. + +- Issue #17981: Closed socket on error in SysLogHandler. + +- Issue #17754: Make ctypes.util.find_library() independent of the locale. + +- Fix typos in the multiprocessing module. + +IDLE +---- + +- Issue #14146: Highlight source line while debugging on Windows. + +- Issue #17532: Always include Options menu for IDLE on OS X. + Patch by Guilherme Sim?es. + +Tests +----- + +- Issue #11995: test_pydoc doesn't import all sys.path modules anymore. + +Documentation +------------- + +- Issue #17844: Refactor a documentation of Python specific encodings. + Add links to encoders and decoders for binary-to-binary codecs. + + What's New in Python 2.7.5? =========================== -*Release date: XXXX-XX-XX* - -Build ------ - -- Issue #17682: Add the _io module to Modules/Setup.dist (commented out). - -- Issue #17086: Search the include and library directories provided by the - compiler. +*Release date: 2013-05-12* Core and Builtins ----------------- +- Issue #15535: Fixed regression in the pickling of named tuples by + removing the __dict__ property introduced in 2.7.4. + +- Issue #17857: Prevent build failures with pre-3.5.0 versions of sqlite3, + such as was shipped with Centos 5 and Mac OS X 10.4. + +- Issue #17703: Fix a regression where an illegal use of Py_DECREF() after + interpreter finalization can cause a crash. + - Issue #16447: Fixed potential segmentation fault when setting __name__ on a class. @@ -25,6 +68,58 @@ Library ------- +- Issue #17979: Fixed the re module in build with --disable-unicode. + +- Issue #17606: Fixed support of encoded byte strings in the XMLGenerator + .characters() and ignorableWhitespace() methods. Original patch by Sebastian + Ortiz Vasquez. + +- Issue #16601: Restarting iteration over tarfile no more continues from where + it left off. Patch by Michael Birtwell. + +- Issue 16584: in filecomp._cmp, catch IOError as well as os.error. + Patch by Till Maas. + +- Issue #17926: Fix dbm.__contains__ on 64-bit big-endian machines. + +- Issue #17918: When using SSLSocket.accept(), if the SSL handshake failed + on the new socket, the socket would linger indefinitely. Thanks to + Peter Saveliev for reporting. + +- Issue #17289: The readline module now plays nicer with external modules + or applications changing the rl_completer_word_break_characters global + variable. Initial patch by Bradley Froehle. + +- Issue #12181: select module: Fix struct kevent definition on OpenBSD 64-bit + platforms. Patch by Federico Schwindt. + +- Issue #14173: Avoid crashing when reading a signal handler during + interpreter shutdown. + +- Issue #16316: mimetypes now recognizes the .xz and .txz (.tar.xz) extensions. 
+ +- Issue #17192: Restore the patch for Issue #10309 which was ommitted + in 2.7.4 when updating the bundled version of libffi used by ctypes. + +- Issue #17843: Removed test data file that was triggering false-positive virus + warnings with certain antivirus software. + +- Issue #17353: Plistlib emitted empty data tags with deeply nested datastructures + +- Issue #11714: Use 'with' statements to assure a Semaphore releases a + condition variable. Original patch by Thomas Rachel. + +- Issue #17795: Reverted backwards-incompatible change in SysLogHandler with + Unix domain sockets. + +- Issue #17555: Fix ForkAwareThreadLock so that size of after fork + registry does not grow exponentially with generation of process. + +- Issue #17710: Fix cPickle raising a SystemError on bogus input. + +- Issue #17341: Include the invalid name in the error messages from re about + invalid group names. + - Issue #17016: Get rid of possible pointer wraparounds and integer overflows in the re module. Patch by Nickolai Zeldovich. @@ -45,9 +140,40 @@ - Issue #17526: fix an IndexError raised while passing code without filename to inspect.findsource(). Initial patch by Tyler Doyle. +Build +----- + +- Issue #17547: In configure, explicitly pass -Wformat for the benefit for GCC + 4.8. + +- Issue #17682: Add the _io module to Modules/Setup.dist (commented out). + +- Issue #17086: Search the include and library directories provided by the + compiler. + +Tests +----- + +- Issue #17928: Fix test_structmembers on 64-bit big-endian machines. + +- Issue #17883: Fix buildbot testing of Tkinter on Windows. + Patch by Zachary Ware. + +- Issue #7855: Add tests for ctypes/winreg for issues found in IronPython. + Initial patch by Dino Viehland. + +- Issue #17712: Fix test_gdb failures on Ubuntu 13.04. + +- Issue #17065: Use process-unique key for winreg tests to avoid failures if + test is run multiple times in parallel (eg: on a buildbot host). + IDLE ---- +- Issue #17838: Allow sys.stdin to be reassigned. + +- Issue #14735: Update IDLE docs to omit "Control-z on Windows". + - Issue #17585: Fixed IDLE regression. Now closes when using exit() or quit(). - Issue #17657: Show full Tk version in IDLE's about dialog. @@ -70,7 +196,6 @@ - Issue #6649: Fixed missing exit status in IDLE. Patch by Guilherme Polo. - Documentation ------------- @@ -97,11 +222,17 @@ mapping such that any type with a __getitem__ can be used on the right hand side. -Library -------- +IDLE +---- - Issue #17625: In IDLE, close the replace dialog after it is used. +Tests +----- + +- Issue #17835: Fix test_io when the default OS pipe buffer size is larger + than one million bytes. + - Issue #17531: Fix tests that thought group and user ids were always the int type. Also, always allow -1 as a valid group and user id. @@ -237,8 +368,6 @@ - Issue #15604: Update uses of PyObject_IsTrue() to check for and handle errors correctly. Patch by Serhiy Storchaka. -- Issue #15041: Update "see also" list in tkinter documentation. - - Issue #14579: Fix error handling bug in the utf-16 decoder. Patch by Serhiy Storchaka. @@ -330,7 +459,7 @@ - Issue #12718: Fix interaction with winpdb overriding __import__ by setting importer attribute on BaseConfigurator instance. - + - Issue #17521: Corrected non-enabling of logger following two calls to fileConfig(). @@ -403,14 +532,9 @@ - Issue #6975: os.path.realpath() now correctly resolves multiple nested symlinks on POSIX platforms. -- Issue #17156: pygettext.py now correctly escapes non-ascii characters. 
- - Issue #7358: cStringIO.StringIO now supports writing to and reading from a stream larger than 2 GiB on 64-bit systems. -- IDLE was displaying spurious SystemExit tracebacks when running scripts - that terminated by raising SystemExit (i.e. unittest and turtledemo). - - Issue #10355: In SpooledTemporaryFile class mode and name properties and xreadlines method now work for unrolled files. encoding and newlines properties now removed as they have no sense and always produced @@ -462,15 +586,9 @@ - Issue #17051: Fix a memory leak in os.path.isdir() on Windows. Patch by Robert Xiao. -- Issue #9290: In IDLE the sys.std* streams now implement io.TextIOBase - interface and support all mandatory methods and properties. - - Issue #13454: Fix a crash when deleting an iterator created by itertools.tee() if all other iterators were very advanced before. -- Issue #1159051: GzipFile now raises EOFError when reading a corrupted file - with truncated header or footer. - - Issue #16992: On Windows in signal.set_wakeup_fd, validate the file descriptor argument. @@ -482,9 +600,6 @@ - Issue #9720: zipfile now writes correct local headers for files larger than 4 GiB. -- Issue #16829: IDLE printing no longer fails if there are spaces or other - special characters in the file path. - - Issue #13899: \A, \Z, and \B now correctly match the A, Z, and B literals when used inside character classes (e.g. '[\A]'). Patch by Matthew Barnett. @@ -502,8 +617,6 @@ - Issue #16828: Fix error incorrectly raised by bz2.compress(''). Patch by Martin Packman. -- Issue #16819: IDLE method completion now correctly works for unicode literals. - - Issue #9586: Redefine SEM_FAILED on MacOSX to keep compiler happy. - Issue #10527: make multiprocessing use poll() instead of select() if available. @@ -514,12 +627,6 @@ - Issue #12065: connect_ex() on an SSL socket now returns the original errno when the socket's timeout expires (it used to return None). -- Issue #16504: IDLE now catches SyntaxErrors raised by tokenizer. Patch by - Roger Serwy. - -- Issue #16702: test_urllib2_localnet tests now correctly ignores proxies for - localhost tests. - - Issue #16713: Fix the parsing of tel url with params using urlparse module. - Issue #16443: Add docstrings to regular expression match objects. @@ -558,8 +665,6 @@ list() calls aren't added to filter(), map(), and zip() which are directly passed enumerate(). -- Issue #16476: Fix json.tool to avoid including trailing whitespace. - - Issue #1160: Fix compiling large regular expressions on UCS2 builds. Patch by Serhiy Storchaka. @@ -590,9 +695,6 @@ - Issue #16152: fix tokenize to ignore whitespace at the end of the code when no newline is found. Patch by Ned Batchelder. -- Issue #1207589: Add Cut/Copy/Paste items to IDLE right click Context Menu - Patch by Todd Rovito. - - Issue #16230: Fix a crash in select.select() when one the lists changes size while iterated on. Patch by Serhiy Storchaka. @@ -678,15 +780,9 @@ - Issue #15424: Add a __sizeof__ implementation for array objects. Patch by Ludwig H?hne. -- Issue #13052: Fix IDLE crashing when replace string in Search/Replace dialog - ended with '\'. Patch by Roger Serwy. - - Issue #15538: Fix compilation of the getnameinfo() / getaddrinfo() emulation code. Patch by Philipp Hagemeister. -- Issue #9803: Don't close IDLE on saving if breakpoint is open. - Patch by Roger Serwy. - - Issue #12288: Consider '0' and '0.0' as valid initialvalue for tkinter SimpleDialog. @@ -761,23 +857,6 @@ - Issue #12157: Make pool.map() empty iterables correctly. 
Initial patch by mouad. -- Issue #14958: Change IDLE systax highlighting to recognize all string and byte - literals currently supported in Python 2.7. - -- Issue #14962: Update text coloring in IDLE shell window after changing - options. Patch by Roger Serwy. - -- Issue #10997: Prevent a duplicate entry in IDLE's "Recent Files" menu. - -- Issue #12510: Attempting to get invalid tooltip no longer closes Idle. - Original patch by Roger Serwy. - -- Issue #10365: File open dialog now works instead of crashing - even when parent window is closed. Patch by Roger Serwy. - -- Issue #14876: Use user-selected font for highlight configuration. - Patch by Roger Serwy. - - Issue #14036: Add an additional check to validate that port in urlparse does not go in illegal range and returns None. @@ -867,11 +946,6 @@ returned. This avoids crashing the server loop when a signal is received. Patch by Jerzy Kozera. -- Issue #14409: IDLE now properly executes commands in the Shell window - when it cannot read the normal config files on startup and - has to use the built-in default key bindings. - There was previously a bug in one of the defaults. - - Issue #10340: asyncore - properly handle EINVAL in dispatcher constructor on OSX; avoid to call handle_connect in case of a disconnected socket which was not meant to connect. @@ -879,9 +953,6 @@ - Issue #12757: Fix the skipping of doctests when python is run with -OO so that it works in unittest's verbose mode as well as non-verbose mode. -- Issue #3573: IDLE hangs when passing invalid command line args - (directory(ies) instead of file(s)) (Patch by Guilherme Polo) - - Issue #13694: asynchronous connect in asyncore.dispatcher does not set addr attribute. @@ -889,8 +960,6 @@ - Issue #11199: Fix the with urllib which hangs on particular ftp urls. -- Issue #5219: Prevent event handler cascade in IDLE. - - Issue #14252: Fix subprocess.Popen.terminate() to not raise an error under Windows when the child process has already exited. @@ -904,9 +973,6 @@ - Issue #2945: Make the distutils upload command aware of bdist_rpm products. -- Issue #13447: Add a test file to host regression tests for bugs in the - scripts found in the Tools directory. - - Issue #6884: Fix long-standing bugs with MANIFEST.in parsing in distutils on Windows. @@ -958,9 +1024,68 @@ and problematic Apple llvm-gcc compiler. If original compiler is not available, use clang instead by default. +IDLE +---- + +- IDLE was displaying spurious SystemExit tracebacks when running scripts + that terminated by raising SystemExit (i.e. unittest and turtledemo). + +- Issue #9290: In IDLE the sys.std* streams now implement io.TextIOBase + interface and support all mandatory methods and properties. + +- Issue #16829: IDLE printing no longer fails if there are spaces or other + special characters in the file path. + +- Issue #16819: IDLE method completion now correctly works for unicode literals. + +- Issue #16504: IDLE now catches SyntaxErrors raised by tokenizer. Patch by + Roger Serwy. + +- Issue #1207589: Add Cut/Copy/Paste items to IDLE right click Context Menu + Patch by Todd Rovito. + +- Issue #13052: Fix IDLE crashing when replace string in Search/Replace dialog + ended with '\'. Patch by Roger Serwy. + +- Issue #9803: Don't close IDLE on saving if breakpoint is open. + Patch by Roger Serwy. + +- Issue #14958: Change IDLE systax highlighting to recognize all string and byte + literals currently supported in Python 2.7. + +- Issue #14962: Update text coloring in IDLE shell window after changing + options. 
Patch by Roger Serwy. + +- Issue #10997: Prevent a duplicate entry in IDLE's "Recent Files" menu. + +- Issue #12510: Attempting to get invalid tooltip no longer closes IDLE. + Original patch by Roger Serwy. + +- Issue #10365: File open dialog now works instead of crashing + even when parent window is closed. Patch by Roger Serwy. + +- Issue #14876: Use user-selected font for highlight configuration. + Patch by Roger Serwy. + +- Issue #14409: IDLE now properly executes commands in the Shell window + when it cannot read the normal config files on startup and + has to use the built-in default key bindings. + There was previously a bug in one of the defaults. + +- Issue #3573: IDLE hangs when passing invalid command line args + (directory(ies) instead of file(s)) (Patch by Guilherme Polo) + +- Issue #5219: Prevent event handler cascade in IDLE. + Tests ----- +- Issue #16702: test_urllib2_localnet tests now correctly ignores proxies for + localhost tests. + +- Issue #13447: Add a test file to host regression tests for bugs in the + scripts found in the Tools directory. + - Issue #11420: make test suite pass with -B/DONTWRITEBYTECODE set. Initial patch by Thomas Wouters. @@ -1090,17 +1215,23 @@ Tools/Demos ----------- +- Issue #17156: pygettext.py now correctly escapes non-ascii characters. + - Issue #15539: Fix a number of bugs in Tools/scripts/pindent.py. Now pindent.py works with a "with" statement. pindent.py no longer produces improper indentation. pindent.py now works with continued lines broken after "class" or "def" keywords and with continuations at the start of line. +- Issue #16476: Fix json.tool to avoid including trailing whitespace. + - Issue #13301: use ast.literal_eval() instead of eval() in Tools/i18n/msgfmt.py Patch by Serhiy Storchaka. Documentation ------------- +- Issue #15041: Update "see also" list in tkinter documentation. + - Issue #17412: update 2.7 Doc/make.bat to also use sphinx-1.0.7. - Issue #17047: remove doubled words in docs and docstrings @@ -1343,21 +1474,8 @@ - Issue #10811: Fix recursive usage of cursors. Instead of crashing, raise a ProgrammingError now. -- Issue #10881: Fix test_site failures with OS X framework builds. - -- Issue #964437 Make IDLE help window non-modal. - Patch by Guilherme Polo and Roger Serwy. - -- Issue #13933: IDLE auto-complete did not work with some imported - module, like hashlib. (Patch by Roger Serwy) - -- Issue #13901: Prevent test_distutils failures on OS X with --enable-shared. - - Issue #13676: Handle strings with embedded zeros correctly in sqlite3. -- Issue #13506: Add '' to path for IDLE Shell when started and restarted with Restart Shell. - Original patches by Marco Scataglini and Roger Serwy. - - Issue #13806: The size check in audioop decompression functions was too strict and could reject valid compressed data. Patch by Oleg Plakhotnyuk. @@ -1396,10 +1514,6 @@ - Issue #8035: urllib: Fix a bug where the client could remain stuck after a redirection or an error. -- Issue #4625: If IDLE cannot write to its recent file or breakpoint - files, display a message popup and continue rather than crash. - (original patch by Roger Serwy) - - tarfile.py: Correctly detect bzip2 compressed streams with blocksizes other than 900k. @@ -1429,9 +1543,6 @@ node when it is the only child of an element. Initial patch by Dan Kenigsberg. -- Issue #8793: Prevent IDLE crash when given strings with invalid hex escape - sequences. 
- - Issues #1745761, #755670, #13357, #12629, #1200313: HTMLParser now correctly handles non-valid attributes, including adjacent and unquoted attributes. @@ -1454,9 +1565,6 @@ - Issue #10817: Fix urlretrieve function to raise ContentTooShortError even when reporthook is None. Patch by Jyrki Pulliainen. -- Issue #13296: Fix IDLE to clear compile __future__ flags on shell restart. - (Patch by Roger Serwy) - - Issue #7334: close source files on ElementTree.parse and iterparse. - Issue #13232: logging: Improved logging of exceptions in the presence of @@ -1701,6 +1809,28 @@ signature. Without this, architectures where sizeof void* != sizeof int are broken. Patch given by Hallvard B Furuseth. +IDLE +---- + +- Issue #964437 Make IDLE help window non-modal. + Patch by Guilherme Polo and Roger Serwy. + +- Issue #13933: IDLE auto-complete did not work with some imported + module, like hashlib. (Patch by Roger Serwy) + +- Issue #13506: Add '' to path for IDLE Shell when started and restarted with Restart Shell. + Original patches by Marco Scataglini and Roger Serwy. + +- Issue #4625: If IDLE cannot write to its recent file or breakpoint + files, display a message popup and continue rather than crash. + (original patch by Roger Serwy) + +- Issue #8793: Prevent IDLE crash when given strings with invalid hex escape + sequences. + +- Issue #13296: Fix IDLE to clear compile __future__ flags on shell restart. + (Patch by Roger Serwy) + Build ----- @@ -1741,6 +1871,10 @@ - Issue #11689: Fix a variable scoping error in an sqlite3 test. Initial patch by Torsten Landschoff. +- Issue #10881: Fix test_site failures with OS X framework builds. + +- Issue #13901: Prevent test_distutils failures on OS X with --enable-shared. + - Issue #13304: Skip test case if user site-packages disabled (-s or PYTHONNOUSERSITE). (Patch by Carl Meyer) @@ -1913,9 +2047,6 @@ Library ------- -- Issue #12590: IDLE editor window now always displays the first line - when opening a long file. With Tk 8.5, the first line was hidden. - - Issue #12161: Cause StringIO.getvalue() to raise a ValueError when used on a closed StringIO instance. @@ -1937,9 +2068,6 @@ - Issue #12124: zipimport doesn't keep a reference to zlib.decompress() anymore to be able to unload the module. -- Issue #11088: don't crash when using F5 to run a script in IDLE on MacOSX - with Tk 8.5. - - Issue #10154, #10090: change the normalization of UTF-8 to "UTF-8" instead of "UTF8" in the locale module as the latter is not supported MacOSX and OpenBSD. @@ -1959,8 +2087,6 @@ - Issue #12012: ssl.PROTOCOL_SSLv2 becomes optional. -- Issue #11164: Remove obsolete allnodes test from minidom test. - - Issue #11927: SMTP_SSL now uses port 465 by default as documented. Patch by Kasun Herath. @@ -2112,17 +2238,6 @@ - Issue #8275: Fix passing of callback arguments with ctypes under Win64. Patch by Stan Mihai. -- Issue #10940: Workaround an IDLE hang on Mac OS X 10.6 when using the - menu accelerators for Open Module, Go to Line, and New Indent Width. - The accelerators still work but no longer appear in the menu items. - -- Issue #10907: Warn OS X 10.6 IDLE users to use ActiveState Tcl/Tk 8.5, rather - than the currently problematic Apple-supplied one, when running with the - 64-/32-bit installer variant. - -- Issue #11052: Correct IDLE menu accelerators on Mac OS X for Save - commands. - - Issue #10949: Improved robustness of rotating file handlers. 
- Issue #10955: Fix a potential crash when trying to mmap() a file past its @@ -2131,9 +2246,6 @@ - Issue #10898: Allow compiling the posix module when the C library defines a symbol named FSTAT. -- Issue #6075: IDLE on Mac OS X now works with both Carbon AquaTk and - Cocoa AquaTk. - - Issue #10916: mmap should not segfault when a file is mapped using 0 as length and a non-zero offset, and an attempt to read past the end of file is made (IndexError is raised instead). Patch by Ross Lagerwall. @@ -2192,8 +2304,6 @@ - Issue #6791: Limit header line length (to 65535 bytes) in http.client, to avoid denial of services from the other party. -- Issue #10404: Use ctl-button-1 on OSX for the context menu in Idle. - - Issue #9907: Fix tab handling on OSX when using editline by calling rl_initialize first, then setting our custom defaults, then reading .editrc. @@ -2211,11 +2321,6 @@ - Issue #10695: passing the port as a string value to telnetlib no longer causes debug mode to fail. -- Issue #10107: Warn about unsaved files in IDLE on OSX. - -- Issue #10406: Enable Rstrip IDLE extension on OSX (just like on other - platforms). - - Issue #10478: Reentrant calls inside buffered IO objects (for example by way of a signal handler) now raise a RuntimeError instead of freezing the current process. @@ -2262,6 +2367,39 @@ - Issue #678250: Make mmap flush a noop on ACCESS_READ and ACCESS_COPY. +IDLE +---- + +- Issue #11718: IDLE's open module dialog couldn't find the __init__.py + file in a package. + +- Issue #12590: IDLE editor window now always displays the first line + when opening a long file. With Tk 8.5, the first line was hidden. + +- Issue #11088: don't crash when using F5 to run a script in IDLE on MacOSX + with Tk 8.5. + +- Issue #10940: Workaround an IDLE hang on Mac OS X 10.6 when using the + menu accelerators for Open Module, Go to Line, and New Indent Width. + The accelerators still work but no longer appear in the menu items. + +- Issue #10907: Warn OS X 10.6 IDLE users to use ActiveState Tcl/Tk 8.5, rather + than the currently problematic Apple-supplied one, when running with the + 64-/32-bit installer variant. + +- Issue #11052: Correct IDLE menu accelerators on Mac OS X for Save + commands. + +- Issue #6075: IDLE on Mac OS X now works with both Carbon AquaTk and + Cocoa AquaTk. + +- Issue #10404: Use ctl-button-1 on OSX for the context menu in Idle. + +- Issue #10107: Warn about unsaved files in IDLE on OSX. + +- Issue #10406: Enable Rstrip IDLE extension on OSX (just like on other + platforms). + Build ----- @@ -2307,15 +2445,11 @@ - Issue #1099: Fix the build on MacOSX when building a framework with pydebug using GCC 4.0. -IDLE ----- - -- Issue #11718: IDLE's open module dialog couldn't find the __init__.py - file in a package. - Tests ----- +- Issue #11164: Remove obsolete allnodes test from minidom test. + - Issue #12205: Fix test_subprocess failure due to uninstalled test data. - Issue #5723: Improve json tests to be executed with and without accelerations. @@ -2384,19 +2518,22 @@ - Issue #4493: urllib2 adds '/' in front of path components which does not start with '/. Common behavior exhibited by browsers and other clients. +- Issue #10407: Fix one NameError in distutils. + +- Issue #10198: fix duplicate header written to wave files when writeframes() + is called without data. + +- Issue #10467: Fix BytesIO.readinto() after seeking into a position after the + end of the file. + +- Issue #5111: IPv6 Host in the Header is wrapped inside [ ]. Patch by Chandru. 
+ +IDLE +---- + - Issue #6378: idle.bat now runs with the appropriate Python version rather than the system default. Patch by Sridhar Ratnakumar. -- Issue #10407: Fix one NameError in distutils. - -- Issue #10198: fix duplicate header written to wave files when writeframes() - is called without data. - -- Issue #10467: Fix BytesIO.readinto() after seeking into a position after the - end of the file. - -- Issue #5111: IPv6 Host in the Header is wrapped inside [ ]. Patch by Chandru. - Build ----- @@ -5046,9 +5183,6 @@ - Issue #6048: Now Distutils uses the tarfile module in archive_util. -- Issue #5150: IDLE's format menu now has an option to strip trailing - whitespace. - - Issue #6121: pydoc now ignores leading and trailing spaces in the argument to the 'help' function. @@ -5707,6 +5841,14 @@ - Windows locale mapping updated to Vista. +IDLE +---- + +- Issue #5150: IDLE's format menu now has an option to strip trailing + whitespace. + +- Issue #5847: Remove -n switch on "Edit with IDLE" menu item. + Tools/Demos ----------- @@ -5740,8 +5882,6 @@ - Issue #6094: Build correctly with Subversion 1.7. -- Issue #5847: Remove -n switch on "Edit with IDLE" menu item. - - Issue #5726: Make Modules/ld_so_aix return the actual exit code of the linker, rather than always exit successfully. Patch by Floris Bruynooghe. @@ -8561,9 +8701,6 @@ Allows the demo2 function to be executed on its own instead of only when the module is run as a script. -- Bug #813342: Start the IDLE subprocess with -Qnew if the parent is - started with that option. - - Bug #1565150: Fix subsecond processing for os.utime on Windows. - Support for MSVC 8 was added to bdist_wininst. @@ -8612,9 +8749,6 @@ - Bug #1531862: Do not close standard file descriptors in subprocess. -- idle: Honor the "Cancel" action in the save dialog (Debian bug - #299092). - - Fix utf-8-sig incremental decoder, which didn't recognise a BOM when the first chunk fed to the decoder started with a BOM, but was longer than 3 bytes. @@ -8857,6 +8991,15 @@ - The sqlite3 module was updated to pysqlite 2.4.1. +IDLE +---- + +- Bug #813342: Start the IDLE subprocess with -Qnew if the parent is + started with that option. + +- IDLE: Honor the "Cancel" action in the save dialog (Debian bug + #299092). 
+ Tests ----- diff --git a/Misc/RPM/python-2.7.spec b/Misc/RPM/python-2.7.spec --- a/Misc/RPM/python-2.7.spec +++ b/Misc/RPM/python-2.7.spec @@ -39,7 +39,7 @@ %define name python #--start constants-- -%define version 2.7.4 +%define version 2.7.5 %define libvers 2.7 #--end constants-- %define release 1pydotorg diff --git a/Modules/_ctypes/libffi/src/dlmalloc.c b/Modules/_ctypes/libffi/src/dlmalloc.c --- a/Modules/_ctypes/libffi/src/dlmalloc.c +++ b/Modules/_ctypes/libffi/src/dlmalloc.c @@ -457,6 +457,11 @@ #define LACKS_ERRNO_H #define MALLOC_FAILURE_ACTION #define MMAP_CLEARS 0 /* WINCE and some others apparently don't clear */ +#elif !defined _GNU_SOURCE +/* mremap() on Linux requires this via sys/mman.h + * See roundup issue 10309 + */ +#define _GNU_SOURCE 1 #endif /* WIN32 */ #ifdef __OS2__ diff --git a/Modules/_multiprocessing/multiprocessing.c b/Modules/_multiprocessing/multiprocessing.c --- a/Modules/_multiprocessing/multiprocessing.c +++ b/Modules/_multiprocessing/multiprocessing.c @@ -63,7 +63,7 @@ break; default: PyErr_Format(PyExc_RuntimeError, - "unkown error number %d", num); + "unknown error number %d", num); } return NULL; } diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -732,7 +732,7 @@ Py_DECREF(self->lastrowid); if (!multiple && statement_type == STATEMENT_INSERT) { - sqlite3_int64 lastrowid; + sqlite_int64 lastrowid; Py_BEGIN_ALLOW_THREADS lastrowid = sqlite3_last_insert_rowid(self->connection->db); Py_END_ALLOW_THREADS diff --git a/Modules/_sqlite/util.c b/Modules/_sqlite/util.c --- a/Modules/_sqlite/util.c +++ b/Modules/_sqlite/util.c @@ -111,7 +111,7 @@ #endif PyObject * -_pysqlite_long_from_int64(sqlite3_int64 value) +_pysqlite_long_from_int64(sqlite_int64 value) { #ifdef HAVE_LONG_LONG # if SIZEOF_LONG_LONG < 8 @@ -135,7 +135,7 @@ return PyInt_FromLong(value); } -sqlite3_int64 +sqlite_int64 _pysqlite_long_as_int64(PyObject * py_val) { int overflow; @@ -158,8 +158,8 @@ #endif return value; } - else if (sizeof(value) < sizeof(sqlite3_int64)) { - sqlite3_int64 int64val; + else if (sizeof(value) < sizeof(sqlite_int64)) { + sqlite_int64 int64val; if (_PyLong_AsByteArray((PyLongObject *)py_val, (unsigned char *)&int64val, sizeof(int64val), IS_LITTLE_ENDIAN, 1 /* signed */) >= 0) { diff --git a/Modules/_sqlite/util.h b/Modules/_sqlite/util.h --- a/Modules/_sqlite/util.h +++ b/Modules/_sqlite/util.h @@ -36,7 +36,7 @@ */ int _pysqlite_seterror(sqlite3* db, sqlite3_stmt* st); -PyObject * _pysqlite_long_from_int64(sqlite3_int64 value); -sqlite3_int64 _pysqlite_long_as_int64(PyObject * value); +PyObject * _pysqlite_long_from_int64(sqlite_int64 value); +sqlite_int64 _pysqlite_long_as_int64(PyObject * value); #endif diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -1813,7 +1813,7 @@ ; test_structmembers *ob; const char *s = NULL; - Py_ssize_t string_len = 0; + int string_len = 0; ob = PyObject_New(test_structmembers, type); if (ob == NULL) return NULL; diff --git a/Modules/cPickle.c b/Modules/cPickle.c --- a/Modules/cPickle.c +++ b/Modules/cPickle.c @@ -3643,17 +3643,19 @@ /* Strip outermost quotes */ - while (s[len-1] <= ' ') + while (len > 0 && s[len-1] <= ' ') len--; - if(s[0]=='"' && s[len-1]=='"'){ + if (len > 1 && s[0]=='"' && s[len-1]=='"') { s[len-1] = '\0'; p = s + 1 ; len -= 2; - } else if(s[0]=='\'' && s[len-1]=='\''){ + } + else if (len > 1 && s[0]=='\'' && s[len-1]=='\'') { s[len-1] = '\0'; p = s + 1 ; 
len -= 2; - } else + } + else goto insecure; /********************************************/ diff --git a/Modules/dbmmodule.c b/Modules/dbmmodule.c --- a/Modules/dbmmodule.c +++ b/Modules/dbmmodule.c @@ -168,11 +168,13 @@ dbm_contains(register dbmobject *dp, PyObject *v) { datum key, val; + char *ptr; + Py_ssize_t size; - if (PyString_AsStringAndSize(v, (char **)&key.dptr, - (Py_ssize_t *)&key.dsize)) { + if (PyString_AsStringAndSize(v, &ptr, &size)) return -1; - } + key.dptr = ptr; + key.dsize = size; /* Expand check_dbmobject_open to return -1 */ if (dp->di_dbm == NULL) { diff --git a/Modules/operator.c b/Modules/operator.c --- a/Modules/operator.c +++ b/Modules/operator.c @@ -412,8 +412,8 @@ "itemgetter(item, ...) --> itemgetter object\n\ \n\ Return a callable object that fetches the given item(s) from its operand.\n\ -After, f=itemgetter(2), the call f(r) returns r[2].\n\ -After, g=itemgetter(2,5,3), the call g(r) returns (r[2], r[5], r[3])"); +After f = itemgetter(2), the call f(r) returns r[2].\n\ +After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3])"); static PyTypeObject itemgetter_type = { PyVarObject_HEAD_INIT(NULL, 0) @@ -592,9 +592,9 @@ "attrgetter(attr, ...) --> attrgetter object\n\ \n\ Return a callable object that fetches the given attribute(s) from its operand.\n\ -After, f=attrgetter('name'), the call f(r) returns r.name.\n\ -After, g=attrgetter('name', 'date'), the call g(r) returns (r.name, r.date).\n\ -After, h=attrgetter('name.first', 'name.last'), the call h(r) returns\n\ +After f = attrgetter('name'), the call f(r) returns r.name.\n\ +After g = attrgetter('name', 'date'), the call g(r) returns (r.name, r.date).\n\ +After h = attrgetter('name.first', 'name.last'), the call h(r) returns\n\ (r.name.first, r.name.last)."); static PyTypeObject attrgetter_type = { @@ -724,8 +724,8 @@ "methodcaller(name, ...) --> methodcaller object\n\ \n\ Return a callable object that calls the given method on its operand.\n\ -After, f = methodcaller('name'), the call f(r) returns r.name().\n\ -After, g = methodcaller('name', 'date', foo=1), the call g(r) returns\n\ +After f = methodcaller('name'), the call f(r) returns r.name().\n\ +After g = methodcaller('name', 'date', foo=1), the call g(r) returns\n\ r.name('date', foo=1)."); static PyTypeObject methodcaller_type = { diff --git a/Modules/readline.c b/Modules/readline.c --- a/Modules/readline.c +++ b/Modules/readline.c @@ -69,6 +69,10 @@ int num_matches, int max_length); +/* Memory allocated for rl_completer_word_break_characters + (see issue #17289 for the motivation). */ +static char *completer_word_break_characters; + /* Exported function to send one line to readline's init file parser */ static PyObject * @@ -344,12 +348,20 @@ { char *break_chars; - if(!PyArg_ParseTuple(args, "s:set_completer_delims", &break_chars)) { + if (!PyArg_ParseTuple(args, "s:set_completer_delims", &break_chars)) { return NULL; } - free((void*)rl_completer_word_break_characters); - rl_completer_word_break_characters = strdup(break_chars); - Py_RETURN_NONE; + /* Keep a reference to the allocated memory in the module state in case + some other module modifies rl_completer_word_break_characters + (see issue #17289). 
*/ + free(completer_word_break_characters); + completer_word_break_characters = strdup(break_chars); + if (completer_word_break_characters) { + rl_completer_word_break_characters = completer_word_break_characters; + Py_RETURN_NONE; + } + else + return PyErr_NoMemory(); } PyDoc_STRVAR(doc_set_completer_delims, @@ -893,7 +905,8 @@ /* Set our completion function */ rl_attempted_completion_function = (CPPFunction *)flex_complete; /* Set Python word break characters */ - rl_completer_word_break_characters = + completer_word_break_characters = + rl_completer_word_break_characters = strdup(" \t\n`~!@#$%^&*()-=+[{]}\\|;:'\",<>/?"); /* All nonalphanums except '.' */ @@ -906,7 +919,7 @@ */ #ifdef __APPLE__ if (using_libedit_emulation) - rl_read_init_file(NULL); + rl_read_init_file(NULL); else #endif /* __APPLE__ */ rl_initialize(); @@ -1137,8 +1150,6 @@ if (m == NULL) return; - - PyOS_ReadlineFunctionPointer = call_readline; setup_readline(); } diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -1203,6 +1203,23 @@ # error uintptr_t does not match int, long, or long long! #endif +/* + * kevent is not standard and its members vary across BSDs. + */ +#if !defined(__OpenBSD__) +# define IDENT_TYPE T_UINTPTRT +# define IDENT_CAST Py_intptr_t +# define DATA_TYPE T_INTPTRT +# define DATA_FMT_UNIT INTPTRT_FMT_UNIT +# define IDENT_AsType PyLong_AsUintptr_t +#else +# define IDENT_TYPE T_UINT +# define IDENT_CAST int +# define DATA_TYPE T_INT +# define DATA_FMT_UNIT "i" +# define IDENT_AsType PyLong_AsUnsignedLong +#endif + /* Unfortunately, we can't store python objects in udata, because * kevents in the kernel can be removed without warning, which would * forever lose the refcount on the object stored with it. 
@@ -1210,11 +1227,11 @@ #define KQ_OFF(x) offsetof(kqueue_event_Object, x) static struct PyMemberDef kqueue_event_members[] = { - {"ident", T_UINTPTRT, KQ_OFF(e.ident)}, + {"ident", IDENT_TYPE, KQ_OFF(e.ident)}, {"filter", T_SHORT, KQ_OFF(e.filter)}, {"flags", T_USHORT, KQ_OFF(e.flags)}, {"fflags", T_UINT, KQ_OFF(e.fflags)}, - {"data", T_INTPTRT, KQ_OFF(e.data)}, + {"data", DATA_TYPE, KQ_OFF(e.data)}, {"udata", T_UINTPTRT, KQ_OFF(e.udata)}, {NULL} /* Sentinel */ }; @@ -1240,7 +1257,7 @@ PyObject *pfd; static char *kwlist[] = {"ident", "filter", "flags", "fflags", "data", "udata", NULL}; - static char *fmt = "O|hhi" INTPTRT_FMT_UNIT UINTPTRT_FMT_UNIT ":kevent"; + static char *fmt = "O|hhi" DATA_FMT_UNIT UINTPTRT_FMT_UNIT ":kevent"; EV_SET(&(self->e), 0, EVFILT_READ, EV_ADD, 0, 0, 0); /* defaults */ @@ -1250,8 +1267,12 @@ return -1; } - if (PyLong_Check(pfd)) { - self->e.ident = PyLong_AsUintptr_t(pfd); + if (PyLong_Check(pfd) +#if IDENT_TYPE == T_UINT + && PyLong_AsUnsignedLong(pfd) <= UINT_MAX +#endif + ) { + self->e.ident = IDENT_AsType(pfd); } else { self->e.ident = PyObject_AsFileDescriptor(pfd); @@ -1279,10 +1300,10 @@ Py_TYPE(s)->tp_name, Py_TYPE(o)->tp_name); return NULL; } - if (((result = s->e.ident - o->e.ident) == 0) && + if (((result = (IDENT_CAST)(s->e.ident - o->e.ident)) == 0) && ((result = s->e.filter - o->e.filter) == 0) && ((result = s->e.flags - o->e.flags) == 0) && - ((result = s->e.fflags - o->e.fflags) == 0) && + ((result = (int)(s->e.fflags - o->e.fflags)) == 0) && ((result = s->e.data - o->e.data) == 0) && ((result = s->e.udata - o->e.udata) == 0) ) { diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -321,7 +321,10 @@ Handlers[sig_num].tripped = 0; Py_INCREF(obj); Handlers[sig_num].func = obj; - return old_handler; + if (old_handler != NULL) + return old_handler; + else + Py_RETURN_NONE; } PyDoc_STRVAR(signal_doc, @@ -349,8 +352,13 @@ return NULL; } old_handler = Handlers[sig_num].func; - Py_INCREF(old_handler); - return old_handler; + if (old_handler != NULL) { + Py_INCREF(old_handler); + return old_handler; + } + else { + Py_RETURN_NONE; + } } PyDoc_STRVAR(getsignal_doc, diff --git a/Modules/sre.h b/Modules/sre.h --- a/Modules/sre.h +++ b/Modules/sre.h @@ -23,8 +23,8 @@ # define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) # endif #else -# define SRE_CODE unsigned long -# if SIZEOF_SIZE_T > SIZEOF_LONG +# define SRE_CODE unsigned int +# if SIZEOF_SIZE_T > SIZEOF_INT # define SRE_MAXREPEAT (~(SRE_CODE)0) # else # define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) diff --git a/Objects/dictobject.c b/Objects/dictobject.c --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2919,6 +2919,10 @@ return NULL; seq_str = PyObject_Repr(seq); + if (seq_str == NULL) { + Py_DECREF(seq); + return NULL; + } result = PyString_FromFormat("%s(%s)", Py_TYPE(dv)->tp_name, PyString_AS_STRING(seq_str)); Py_DECREF(seq_str); diff --git a/PCbuild/rt.bat b/PCbuild/rt.bat --- a/PCbuild/rt.bat +++ b/PCbuild/rt.bat @@ -30,7 +30,7 @@ set suffix= set qmode= set dashO= -set tcltk= +set tcltk=tcltk :CheckOpts if "%1"=="-O" (set dashO=-O) & shift & goto CheckOpts @@ -38,7 +38,7 @@ if "%1"=="-d" (set suffix=_d) & shift & goto CheckOpts if "%1"=="-x64" (set prefix=amd64) & (set tcltk=tcltk64) & shift & goto CheckOpts -PATH %PATH%;..\..\%tcltk%\bin +PATH %PATH%;%~dp0..\..\%tcltk%\bin set exe=%prefix%\python%suffix% set cmd=%exe% %dashO% -Wd -3 -E -tt ../lib/test/regrtest.py %1 %2 %3 %4 %5 %6 %7 %8 %9 if defined qmode goto 
Qmode diff --git a/README b/README --- a/README +++ b/README @@ -1,4 +1,4 @@ -This is Python version 2.7.4 +This is Python version 2.7.5 ============================ Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, diff --git a/Tools/scripts/gprof2html.py b/Tools/scripts/gprof2html.py --- a/Tools/scripts/gprof2html.py +++ b/Tools/scripts/gprof2html.py @@ -1,4 +1,4 @@ -#! /usr/bin/env python2.3 +#! /usr/bin/env python """Transform gprof(1) output into useful HTML.""" diff --git a/configure b/configure --- a/configure +++ b/configure @@ -6253,7 +6253,7 @@ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether gcc supports ParseTuple __format__" >&5 $as_echo_n "checking whether gcc supports ParseTuple __format__... " >&6; } save_CFLAGS=$CFLAGS - CFLAGS="$CFLAGS -Werror" + CFLAGS="$CFLAGS -Werror -Wformat" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ diff --git a/configure.ac b/configure.ac --- a/configure.ac +++ b/configure.ac @@ -1326,7 +1326,7 @@ then AC_MSG_CHECKING(whether gcc supports ParseTuple __format__) save_CFLAGS=$CFLAGS - CFLAGS="$CFLAGS -Werror" + CFLAGS="$CFLAGS -Werror -Wformat" AC_COMPILE_IFELSE([ AC_LANG_PROGRAM([[void f(char*,...)__attribute((format(PyArg_ParseTuple, 1, 2)));]], [[]]) ],[ diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -437,9 +437,11 @@ def detect_modules(self): # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - self.add_gcc_paths() + if not cross_compiling: + add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') + add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') + if cross_compiling: + self.add_gcc_paths() self.add_multiarch_paths() # Add paths specified in the environment variables LDFLAGS and -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 21:51:34 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 21:51:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMi43IC0+IDIuNyk6?= =?utf-8?q?_Issue_=2317701=3A_Improving_strftime_documentation?= Message-ID: <3cDSP21dRCz7LjM@mail.python.org> http://hg.python.org/cpython/rev/adbc9789a5e4 changeset: 85139:adbc9789a5e4 branch: 2.7 parent: 85121:9ddc63c039ba parent: 85138:0f4d971b0cee user: David Wolever date: Mon Aug 12 15:50:10 2013 -0400 summary: Issue #17701: Improving strftime documentation files: Doc/library/datetime.rst | 260 ++++++++++++++------------ Misc/NEWS | 1 + 2 files changed, 142 insertions(+), 119 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -551,8 +551,9 @@ .. method:: date.strftime(format) Return a string representing the date, controlled by an explicit format string. - Format codes referring to hours, minutes or seconds will see 0 values. See - section :ref:`strftime-strptime-behavior`. + Format codes referring to hours, minutes or seconds will see 0 values. For a + complete list of formatting directives, see section + :ref:`strftime-strptime-behavior`. .. method:: date.__format__(format) @@ -730,7 +731,8 @@ *format*. This is equivalent to ``datetime(*(time.strptime(date_string, format)[0:6]))``. :exc:`ValueError` is raised if the date_string and format can't be parsed by :func:`time.strptime` or if it returns a value which isn't a - time tuple. See section :ref:`strftime-strptime-behavior`. 
+ time tuple. For a complete list of formatting directives, see section + :ref:`strftime-strptime-behavior`. .. versionadded:: 2.5 @@ -1050,7 +1052,8 @@ .. method:: datetime.strftime(format) Return a string representing the date and time, controlled by an explicit format - string. See section :ref:`strftime-strptime-behavior`. + string. For a complete list of formatting directives, see section + :ref:`strftime-strptime-behavior`. .. method:: datetime.__format__(format) @@ -1283,7 +1286,8 @@ .. method:: time.strftime(format) Return a string representing the time, controlled by an explicit format string. - See section :ref:`strftime-strptime-behavior`. + For a complete list of formatting directives, see section + :ref:`strftime-strptime-behavior`. .. method:: time.__format__(format) @@ -1597,27 +1601,6 @@ microseconds should not be used, as :class:`date` objects have no such values. If they're used anyway, ``0`` is substituted for them. -.. versionadded:: 2.6 - :class:`.time` and :class:`.datetime` objects support a ``%f`` format code - which expands to the number of microseconds in the object, zero-padded on - the left to six places. - -For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty -strings. - -For an aware object: - -``%z`` - :meth:`utcoffset` is transformed into a 5-character string of the form +HHMM or - -HHMM, where HH is a 2-digit string giving the number of UTC offset hours, and - MM is a 2-digit string giving the number of UTC offset minutes. For example, if - :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``, ``%z`` is - replaced with the string ``'-0330'``. - -``%Z`` - If :meth:`tzname` returns ``None``, ``%Z`` is replaced by an empty string. - Otherwise ``%Z`` is replaced by the returned value, which must be a string. - The full set of format codes supported varies across platforms, because Python calls the platform C library's :func:`strftime` function, and platform variations are common. @@ -1630,99 +1613,115 @@ The exact range of years for which :meth:`strftime` works also varies across platforms. Regardless of platform, years before 1900 cannot be used. -+-----------+--------------------------------+-------+ -| Directive | Meaning | Notes | -+===========+================================+=======+ -| ``%a`` | Locale's abbreviated weekday | | -| | name. | | -+-----------+--------------------------------+-------+ -| ``%A`` | Locale's full weekday name. | | -+-----------+--------------------------------+-------+ -| ``%b`` | Locale's abbreviated month | | -| | name. | | -+-----------+--------------------------------+-------+ -| ``%B`` | Locale's full month name. | | -+-----------+--------------------------------+-------+ -| ``%c`` | Locale's appropriate date and | | -| | time representation. | | -+-----------+--------------------------------+-------+ -| ``%d`` | Day of the month as a decimal | | -| | number [01,31]. | | -+-----------+--------------------------------+-------+ -| ``%f`` | Microsecond as a decimal | \(1) | -| | number [0,999999], zero-padded | | -| | on the left | | -+-----------+--------------------------------+-------+ -| ``%H`` | Hour (24-hour clock) as a | | -| | decimal number [00,23]. | | -+-----------+--------------------------------+-------+ -| ``%I`` | Hour (12-hour clock) as a | | -| | decimal number [01,12]. | | -+-----------+--------------------------------+-------+ -| ``%j`` | Day of the year as a decimal | | -| | number [001,366]. 
| | -+-----------+--------------------------------+-------+ -| ``%m`` | Month as a decimal number | | -| | [01,12]. | | -+-----------+--------------------------------+-------+ -| ``%M`` | Minute as a decimal number | | -| | [00,59]. | | -+-----------+--------------------------------+-------+ -| ``%p`` | Locale's equivalent of either | \(2) | -| | AM or PM. | | -+-----------+--------------------------------+-------+ -| ``%S`` | Second as a decimal number | \(3) | -| | [00,61]. | | -+-----------+--------------------------------+-------+ -| ``%U`` | Week number of the year | \(4) | -| | (Sunday as the first day of | | -| | the week) as a decimal number | | -| | [00,53]. All days in a new | | -| | year preceding the first | | -| | Sunday are considered to be in | | -| | week 0. | | -+-----------+--------------------------------+-------+ -| ``%w`` | Weekday as a decimal number | | -| | [0(Sunday),6]. | | -+-----------+--------------------------------+-------+ -| ``%W`` | Week number of the year | \(4) | -| | (Monday as the first day of | | -| | the week) as a decimal number | | -| | [00,53]. All days in a new | | -| | year preceding the first | | -| | Monday are considered to be in | | -| | week 0. | | -+-----------+--------------------------------+-------+ -| ``%x`` | Locale's appropriate date | | -| | representation. | | -+-----------+--------------------------------+-------+ -| ``%X`` | Locale's appropriate time | | -| | representation. | | -+-----------+--------------------------------+-------+ -| ``%y`` | Year without century as a | | -| | decimal number [00,99]. | | -+-----------+--------------------------------+-------+ -| ``%Y`` | Year with century as a decimal | | -| | number. | | -+-----------+--------------------------------+-------+ -| ``%z`` | UTC offset in the form +HHMM | \(5) | -| | or -HHMM (empty string if the | | -| | the object is naive). | | -+-----------+--------------------------------+-------+ -| ``%Z`` | Time zone name (empty string | | -| | if the object is naive). | | -+-----------+--------------------------------+-------+ -| ``%%`` | A literal ``'%'`` character. | | -+-----------+--------------------------------+-------+ ++-----------+--------------------------------+------------------------+-------+ +| Directive | Meaning | Example | Notes | ++===========+================================+========================+=======+ +| ``%a`` | Weekday as locale's || Sun, Mon, ..., Sat | \(1) | +| | abbreviated name. | (en_US); | | +| | || So, Mo, ..., Sa | | +| | | (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%A`` | Weekday as locale's full name. || Sunday, Monday, ..., | \(1) | +| | | Saturday (en_US); | | +| | || Sonntag, Montag, ..., | | +| | | Samstag (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%w`` | Weekday as a decimal number, | 0, 1, ..., 6 | | +| | where 0 is Sunday and 6 is | | | +| | Saturday. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%d`` | Day of the month as a | 01, 02, ..., 31 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%b`` | Month as locale's abbreviated || Jan, Feb, ..., Dec | \(1) | +| | name. | (en_US); | | +| | || Jan, Feb, ..., Dez | | +| | | (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%B`` | Month as locale's full name. 
|| January, February, | \(1) | +| | | ..., December (en_US);| | +| | || Januar, Februar, ..., | | +| | | Dezember (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%m`` | Month as a zero-padded | 01, 02, ..., 12 | | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%y`` | Year without century as a | 00, 01, ..., 99 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%Y`` | Year with century as a decimal | 1970, 1988, 2001, 2013 | | +| | number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%H`` | Hour (24-hour clock) as a | 00, 01, ..., 23 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%I`` | Hour (12-hour clock) as a | 01, 02, ..., 12 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%p`` | Locale's equivalent of either || AM, PM (en_US); | \(1), | +| | AM or PM. || am, pm (de_DE) | \(2) | ++-----------+--------------------------------+------------------------+-------+ +| ``%M`` | Minute as a zero-padded | 00, 01, ..., 59 | | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%S`` | Second as a zero-padded | 00, 01, ..., 61 | \(3) | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%f`` | Microsecond as a decimal | 000000, 000001, ..., | \(4) | +| | number, zero-padded on the | 999999 | | +| | left. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%z`` | UTC offset in the form +HHMM | (empty), +0000, -0400, | \(5) | +| | or -HHMM (empty string if the | +1030 | | +| | the object is naive). | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%Z`` | Time zone name (empty string | (empty), UTC, EST, CST | | +| | if the object is naive). | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%j`` | Day of the year as a | 001, 002, ..., 366 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%U`` | Week number of the year | 00, 01, ..., 53 | \(6) | +| | (Sunday as the first day of | | | +| | the week) as a zero padded | | | +| | decimal number. All days in a | | | +| | new year preceding the first | | | +| | Sunday are considered to be in | | | +| | week 0. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%W`` | Week number of the year | 00, 01, ..., 53 | \(6) | +| | (Monday as the first day of | | | +| | the week) as a decimal number. | | | +| | All days in a new year | | | +| | preceding the first Monday | | | +| | are considered to be in | | | +| | week 0. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%c`` | Locale's appropriate date and || Tue Aug 16 21:30:00 | \(1) | +| | time representation. | 1988 (en_US); | | +| | || Di 16 Aug 21:30:00 | | +| | | 1988 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%x`` | Locale's appropriate date || 08/16/88 (None); | \(1) | +| | representation. 
|| 08/16/1988 (en_US); | | +| | || 16.08.1988 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%X`` | Locale's appropriate time || 21:30:00 (en_US); | \(1) | +| | representation. || 21:30:00 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%%`` | A literal ``'%'`` character. | % | | ++-----------+--------------------------------+------------------------+-------+ Notes: (1) - When used with the :meth:`strptime` method, the ``%f`` directive - accepts from one to six digits and zero pads on the right. ``%f`` is - an extension to the set of format characters in the C standard (but - implemented separately in datetime objects, and therefore always - available). + Because the format depends on the current locale, care should be taken when + making assumptions about the output value. Field orderings will vary (for + example, "month/day/year" versus "day/month/year"), and the output may + contain Unicode characters encoded using the locale's default encoding (for + example, if the current locale is ``js_JP``, the default encoding could be + any one of ``eucJP``, ``SJIS``, or ``utf-8``; use :meth:`locale.getlocale` + to determine the current locale's encoding). (2) When used with the :meth:`strptime` method, the ``%p`` directive only affects @@ -1737,12 +1736,35 @@ produce them in :func:`strftime` output. (4) - When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used in - calculations when the day of the week and the year are specified. + ``%f`` is an extension to the set of format characters in the C standard + (but implemented separately in datetime objects, and therefore always + available). When used with the :meth:`strptime` method, the ``%f`` + directive accepts from one to six digits and zero pads on the right. + + .. versionadded:: 2.6 (5) - For example, if :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``, - ``%z`` is replaced with the string ``'-0330'``. + For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty + strings. + + For an aware object: + + ``%z`` + :meth:`utcoffset` is transformed into a 5-character string of the form + +HHMM or -HHMM, where HH is a 2-digit string giving the number of UTC + offset hours, and MM is a 2-digit string giving the number of UTC offset + minutes. For example, if :meth:`utcoffset` returns + ``timedelta(hours=-3, minutes=-30)``, ``%z`` is replaced with the string + ``'-0330'``. + + ``%Z`` + If :meth:`tzname` returns ``None``, ``%Z`` is replaced by an empty + string. Otherwise ``%Z`` is replaced by the returned value, which must + be a string. + +(6) + When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used + in calculations when the day of the week and the year are specified. .. rubric:: Footnotes diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -164,6 +164,7 @@ Documentation ------------- +- Issue #17701: Improving strftime documentation. - Issue #17844: Refactor a documentation of Python specific encodings. Add links to encoders and decoders for binary-to-binary codecs. 
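A minimal doctest-style sketch of the directives documented in the tables above, assuming a naive datetime object and an en_US-like locale (the locale-dependent names may differ on other systems):

    >>> from datetime import datetime
    >>> dt = datetime(1988, 8, 16, 21, 30, 0)
    >>> dt.strftime("%Y-%m-%d %H:%M:%S")        # zero-padded numeric directives
    '1988-08-16 21:30:00'
    >>> dt.strftime("%a %d %b %Y")              # locale's abbreviated names
    'Tue 16 Aug 1988'
    >>> dt.strftime("%z"), dt.strftime("%Z")    # empty strings for a naive object
    ('', '')
    >>> datetime.strptime("16/08/1988 21:30", "%d/%m/%Y %H:%M")
    datetime.datetime(1988, 8, 16, 21, 30)

As note (1) above warns, output driven by the locale (%a, %A, %b, %B, %c, %p, %x, %X) should not be relied on for a fixed format; the numeric directives are the portable choice.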
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 22:39:20 2013 From: python-checkins at python.org (antoine.pitrou) Date: Mon, 12 Aug 2013 22:39:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318585=3A_Add_=3Af?= =?utf-8?q?unc=3A=60textwrap=2Eshorten=60_to_collapse_and_truncate_a_piece?= =?utf-8?q?_of?= Message-ID: <3cDTS82VgRz7LjV@mail.python.org> http://hg.python.org/cpython/rev/c27ec198d3d1 changeset: 85140:c27ec198d3d1 parent: 85132:b788d04f14d6 user: Antoine Pitrou date: Mon Aug 12 22:39:09 2013 +0200 summary: Issue #18585: Add :func:`textwrap.shorten` to collapse and truncate a piece of text to a given length. files: Doc/library/textwrap.rst | 60 +++++++++++++++++------ Lib/test/test_textwrap.py | 68 +++++++++++++++++++++++--- Lib/textwrap.py | 53 ++++++++++++++++++++- Misc/NEWS | 3 + 4 files changed, 156 insertions(+), 28 deletions(-) diff --git a/Doc/library/textwrap.rst b/Doc/library/textwrap.rst --- a/Doc/library/textwrap.rst +++ b/Doc/library/textwrap.rst @@ -10,11 +10,11 @@ -------------- -The :mod:`textwrap` module provides two convenience functions, :func:`wrap` and -:func:`fill`, as well as :class:`TextWrapper`, the class that does all the work, -and two utility functions, :func:`dedent` and :func:`indent`. If you're just wrapping or filling one -or two text strings, the convenience functions should be good enough; -otherwise, you should use an instance of :class:`TextWrapper` for efficiency. +The :mod:`textwrap` module provides some convenience functions, +as well as :class:`TextWrapper`, the class that does all the work. +If you're just wrapping or filling one or two text strings, the convenience +functions should be good enough; otherwise, you should use an instance of +:class:`TextWrapper` for efficiency. .. function:: wrap(text, width=70, **kwargs) @@ -39,19 +39,24 @@ In particular, :func:`fill` accepts exactly the same keyword arguments as :func:`wrap`. -Both :func:`wrap` and :func:`fill` work by creating a :class:`TextWrapper` -instance and calling a single method on it. That instance is not reused, so for -applications that wrap/fill many text strings, it will be more efficient for you -to create your own :class:`TextWrapper` object. -Text is preferably wrapped on whitespaces and right after the hyphens in -hyphenated words; only then will long words be broken if necessary, unless -:attr:`TextWrapper.break_long_words` is set to false. +.. function:: shorten(text, width=70, *, placeholder=" (...)") -Two additional utility function, :func:`dedent` and :func:`indent`, are -provided to remove indentation from strings that have unwanted whitespace -to the left of the text and to add an arbitrary prefix to selected lines -in a block of text. + Collapse and truncate the given text to fit in the given width. + + The text first has its whitespace collapsed. If it then fits in + the *width*, it is returned unchanged. Otherwise, as many words + as possible are joined and then the *placeholder* is appended:: + + >>> textwrap.shorten("Hello world!", width=12) + 'Hello world!' + >>> textwrap.shorten("Hello world!", width=11) + 'Hello (...)' + >>> textwrap.shorten("Hello world", width=10, placeholder="...") + 'Hello...' + + .. versionadded:: 3.4 + .. function:: dedent(text) @@ -102,6 +107,16 @@ + world +:func:`wrap`, :func:`fill` and :func:`shorten` work by creating a +:class:`TextWrapper` instance and calling a single method on it. 
That +instance is not reused, so for applications that process many text +strings, it may be more efficient to create your own +:class:`TextWrapper` object. + +Text is preferably wrapped on whitespaces and right after the hyphens in +hyphenated words; only then will long words be broken if necessary, unless +:attr:`TextWrapper.break_long_words` is set to false. + .. class:: TextWrapper(**kwargs) The :class:`TextWrapper` constructor accepts a number of optional keyword @@ -235,7 +250,7 @@ was to always allow breaking hyphenated words. - :class:`TextWrapper` also provides two public methods, analogous to the + :class:`TextWrapper` also provides some public methods, analogous to the module-level convenience functions: .. method:: wrap(text) @@ -252,3 +267,14 @@ Wraps the single paragraph in *text*, and returns a single string containing the wrapped paragraph. + + .. function:: shorten(text, *, placeholder=" (...)") + + Collapse and truncate the given text to fit in :attr:`width` + characters. + + The text first has its whitespace collapsed. If it then fits in + :attr:`width`, it is returned as-is. Otherwise, as many words + as possible are joined and then the *placeholder* is appended. + + .. versionadded:: 3.4 diff --git a/Lib/test/test_textwrap.py b/Lib/test/test_textwrap.py --- a/Lib/test/test_textwrap.py +++ b/Lib/test/test_textwrap.py @@ -9,9 +9,8 @@ # import unittest -from test import support -from textwrap import TextWrapper, wrap, fill, dedent, indent +from textwrap import TextWrapper, wrap, fill, dedent, indent, shorten class BaseTestCase(unittest.TestCase): @@ -43,6 +42,10 @@ "\nexpected %r\n" "but got %r" % (expect, result)) + def check_shorten(self, text, width, expect, **kwargs): + result = shorten(text, width, **kwargs) + self.check(result, expect) + class WrapTestCase(BaseTestCase): @@ -777,12 +780,59 @@ self.assertEqual(indent(text, prefix, predicate), expect) -def test_main(): - support.run_unittest(WrapTestCase, - LongWordTestCase, - IndentTestCases, - DedentTestCase, - IndentTestCase) +class ShortenTestCase(BaseTestCase): + + def test_simple(self): + # Simple case: just words, spaces, and a bit of punctuation + text = "Hello there, how are you this fine day? I'm glad to hear it!" + + self.check_shorten(text, 18, "Hello there, (...)") + self.check_shorten(text, len(text), text) + self.check_shorten(text, len(text) - 1, + "Hello there, how are you this fine day? " + "I'm glad to (...)") + + def test_placeholder(self): + text = "Hello there, how are you this fine day? I'm glad to hear it!" + + self.check_shorten(text, 17, "Hello there,$$", placeholder='$$') + self.check_shorten(text, 18, "Hello there, how$$", placeholder='$$') + self.check_shorten(text, 18, "Hello there, $$", placeholder=' $$') + self.check_shorten(text, len(text), text, placeholder='$$') + self.check_shorten(text, len(text) - 1, + "Hello there, how are you this fine day? " + "I'm glad to hear$$", placeholder='$$') + + def test_empty_string(self): + self.check_shorten("", 6, "") + + def test_whitespace(self): + # Whitespace collapsing + text = """ + This is a paragraph that already has + line breaks and \t tabs too.""" + self.check_shorten(text, 62, + "This is a paragraph that already has line " + "breaks and tabs too.") + self.check_shorten(text, 61, + "This is a paragraph that already has line " + "breaks and (...)") + + self.check_shorten("hello world! ", 12, "hello world!") + self.check_shorten("hello world! 
", 11, "hello (...)") + # The leading space is trimmed from the placeholder + # (it would be ugly otherwise). + self.check_shorten("hello world! ", 10, "(...)") + + def test_width_too_small_for_placeholder(self): + wrapper = TextWrapper(width=8) + wrapper.shorten("x" * 20, placeholder="(......)") + with self.assertRaises(ValueError): + wrapper.shorten("x" * 20, placeholder="(.......)") + + def test_first_word_too_long_but_placeholder_fits(self): + self.check_shorten("Helloo", 5, "(...)") + if __name__ == '__main__': - test_main() + unittest.main() diff --git a/Lib/textwrap.py b/Lib/textwrap.py --- a/Lib/textwrap.py +++ b/Lib/textwrap.py @@ -19,6 +19,8 @@ # since 0xa0 is not in range(128). _whitespace = '\t\n\x0b\x0c\r ' +_default_placeholder = ' (...)' + class TextWrapper: """ Object for wrapping/filling text. The public interface consists of @@ -277,6 +279,9 @@ return lines + def _split_chunks(self, text): + text = self._munge_whitespace(text) + return self._split(text) # -- Public interface ---------------------------------------------- @@ -289,8 +294,7 @@ and all other whitespace characters (including newline) are converted to space. """ - text = self._munge_whitespace(text) - chunks = self._split(text) + chunks = self._split_chunks(text) if self.fix_sentence_endings: self._fix_sentence_endings(chunks) return self._wrap_chunks(chunks) @@ -304,6 +308,36 @@ """ return "\n".join(self.wrap(text)) + def shorten(self, text, *, placeholder=_default_placeholder): + """shorten(text: str) -> str + + Collapse and truncate the given text to fit in 'self.width' columns. + """ + max_length = self.width + if max_length < len(placeholder.strip()): + raise ValueError("placeholder too large for max width") + sep = ' ' + sep_len = len(sep) + parts = [] + cur_len = 0 + chunks = self._split_chunks(text) + for chunk in chunks: + if not chunk.strip(): + continue + chunk_len = len(chunk) + sep_len if parts else len(chunk) + if cur_len + chunk_len > max_length: + break + parts.append(chunk) + cur_len += chunk_len + else: + # No truncation necessary + return sep.join(parts) + max_truncated_length = max_length - len(placeholder) + while parts and cur_len > max_truncated_length: + last = parts.pop() + cur_len -= len(last) + sep_len + return (sep.join(parts) + placeholder).strip() + # -- Convenience interface --------------------------------------------- @@ -332,6 +366,21 @@ w = TextWrapper(width=width, **kwargs) return w.fill(text) +def shorten(text, width, *, placeholder=_default_placeholder, **kwargs): + """Collapse and truncate the given text to fit in the given width. + + The text first has its whitespace collapsed. If it then fits in + the *width*, it is returned as is. Otherwise, as many words + as possible are joined and then the placeholder is appended:: + + >>> textwrap.shorten("Hello world!", width=12) + 'Hello world!' + >>> textwrap.shorten("Hello world!", width=11) + 'Hello (...)' + """ + w = TextWrapper(width=width, **kwargs) + return w.shorten(text, placeholder=placeholder) + # -- Loosely related functionality ------------------------------------- diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,6 +26,9 @@ Library ------- +- Issue #18585: Add :func:`textwrap.shorten` to collapse and truncate a + piece of text to a given length. + - Issue #18598: Tweak exception message for importlib.import_module() to include the module name when a key argument is missing. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 23:15:53 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 23:15:53 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3NzAx?= =?utf-8?q?=3A_Improving_strftime_documentation=2E?= Message-ID: <3cDVGK2WwjzNWm@mail.python.org> http://hg.python.org/cpython/rev/1d4b02d8fa8a changeset: 85141:1d4b02d8fa8a branch: 3.3 parent: 85129:a89226508a04 user: David Wolever date: Mon Aug 12 16:56:02 2013 -0400 summary: Issue #17701: Improving strftime documentation. files: Doc/library/datetime.rst | 314 ++++++++++++++------------ Misc/NEWS | 2 + 2 files changed, 173 insertions(+), 143 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -593,15 +593,17 @@ .. method:: date.strftime(format) Return a string representing the date, controlled by an explicit format string. - Format codes referring to hours, minutes or seconds will see 0 values. See - section :ref:`strftime-strptime-behavior`. + Format codes referring to hours, minutes or seconds will see 0 values. For a + complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. .. method:: date.__format__(format) Same as :meth:`.date.strftime`. This makes it possible to specify format - string for a :class:`.date` object when using :meth:`str.format`. - See section :ref:`strftime-strptime-behavior`. + string for a :class:`.date` object when using :meth:`str.format`. For a + complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. Example of counting days to an event:: @@ -793,7 +795,8 @@ *format*. This is equivalent to ``datetime(*(time.strptime(date_string, format)[0:6]))``. :exc:`ValueError` is raised if the date_string and format can't be parsed by :func:`time.strptime` or if it returns a value which isn't a - time tuple. See section :ref:`strftime-strptime-behavior`. + time tuple. For a complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. @@ -1160,14 +1163,16 @@ .. method:: datetime.strftime(format) Return a string representing the date and time, controlled by an explicit format - string. See section :ref:`strftime-strptime-behavior`. + string. For a complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. .. method:: datetime.__format__(format) Same as :meth:`.datetime.strftime`. This makes it possible to specify format - string for a :class:`.datetime` object when using :meth:`str.format`. - See section :ref:`strftime-strptime-behavior`. + string for a :class:`.datetime` object when using :meth:`str.format`. For a + complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. Examples of working with datetime objects: @@ -1399,15 +1404,17 @@ .. method:: time.strftime(format) - Return a string representing the time, controlled by an explicit format string. - See section :ref:`strftime-strptime-behavior`. + Return a string representing the time, controlled by an explicit format + string. For a complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. .. method:: time.__format__(format) Same as :meth:`.time.strftime`. This makes it possible to specify format string - for a :class:`.time` object when using :meth:`str.format`. - See section :ref:`strftime-strptime-behavior`. + for a :class:`.time` object when using :meth:`str.format`. 
For a + complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. .. method:: time.utcoffset() @@ -1773,22 +1780,6 @@ microseconds should not be used, as :class:`date` objects have no such values. If they're used anyway, ``0`` is substituted for them. -For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty -strings. - -For an aware object: - -``%z`` - :meth:`utcoffset` is transformed into a 5-character string of the form +HHMM or - -HHMM, where HH is a 2-digit string giving the number of UTC offset hours, and - MM is a 2-digit string giving the number of UTC offset minutes. For example, if - :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``, ``%z`` is - replaced with the string ``'-0330'``. - -``%Z`` - If :meth:`tzname` returns ``None``, ``%Z`` is replaced by an empty string. - Otherwise ``%Z`` is replaced by the returned value, which must be a string. - The full set of format codes supported varies across platforms, because Python calls the platform C library's :func:`strftime` function, and platform variations are common. @@ -1798,133 +1789,170 @@ implementation. Note that the 1999 version of the C standard added additional format codes. -+-----------+--------------------------------+-------+ -| Directive | Meaning | Notes | -+===========+================================+=======+ -| ``%a`` | Locale's abbreviated weekday | | -| | name. | | -+-----------+--------------------------------+-------+ -| ``%A`` | Locale's full weekday name. | | -+-----------+--------------------------------+-------+ -| ``%b`` | Locale's abbreviated month | | -| | name. | | -+-----------+--------------------------------+-------+ -| ``%B`` | Locale's full month name. | | -+-----------+--------------------------------+-------+ -| ``%c`` | Locale's appropriate date and | | -| | time representation. | | -+-----------+--------------------------------+-------+ -| ``%d`` | Day of the month as a decimal | | -| | number [01,31]. | | -+-----------+--------------------------------+-------+ -| ``%f`` | Microsecond as a decimal | \(1) | -| | number [0,999999], zero-padded | | -| | on the left | | -+-----------+--------------------------------+-------+ -| ``%H`` | Hour (24-hour clock) as a | | -| | decimal number [00,23]. | | -+-----------+--------------------------------+-------+ -| ``%I`` | Hour (12-hour clock) as a | | -| | decimal number [01,12]. | | -+-----------+--------------------------------+-------+ -| ``%j`` | Day of the year as a decimal | | -| | number [001,366]. | | -+-----------+--------------------------------+-------+ -| ``%m`` | Month as a decimal number | | -| | [01,12]. | | -+-----------+--------------------------------+-------+ -| ``%M`` | Minute as a decimal number | | -| | [00,59]. | | -+-----------+--------------------------------+-------+ -| ``%p`` | Locale's equivalent of either | \(2) | -| | AM or PM. | | -+-----------+--------------------------------+-------+ -| ``%S`` | Second as a decimal number | \(3) | -| | [00,59]. | | -+-----------+--------------------------------+-------+ -| ``%U`` | Week number of the year | \(4) | -| | (Sunday as the first day of | | -| | the week) as a decimal number | | -| | [00,53]. All days in a new | | -| | year preceding the first | | -| | Sunday are considered to be in | | -| | week 0. | | -+-----------+--------------------------------+-------+ -| ``%w`` | Weekday as a decimal number | | -| | [0(Sunday),6]. 
| | -+-----------+--------------------------------+-------+ -| ``%W`` | Week number of the year | \(4) | -| | (Monday as the first day of | | -| | the week) as a decimal number | | -| | [00,53]. All days in a new | | -| | year preceding the first | | -| | Monday are considered to be in | | -| | week 0. | | -+-----------+--------------------------------+-------+ -| ``%x`` | Locale's appropriate date | | -| | representation. | | -+-----------+--------------------------------+-------+ -| ``%X`` | Locale's appropriate time | | -| | representation. | | -+-----------+--------------------------------+-------+ -| ``%y`` | Year without century as a | | -| | decimal number [00,99]. | | -+-----------+--------------------------------+-------+ -| ``%Y`` | Year with century as a decimal | \(5) | -| | number [0001,9999]. | | -+-----------+--------------------------------+-------+ -| ``%z`` | UTC offset in the form +HHMM | \(6) | -| | or -HHMM (empty string if the | | -| | the object is naive). | | -+-----------+--------------------------------+-------+ -| ``%Z`` | Time zone name (empty string | | -| | if the object is naive). | | -+-----------+--------------------------------+-------+ -| ``%%`` | A literal ``'%'`` character. | | -+-----------+--------------------------------+-------+ ++-----------+--------------------------------+------------------------+-------+ +| Directive | Meaning | Example | Notes | ++===========+================================+========================+=======+ +| ``%a`` | Weekday as locale's || Sun, Mon, ..., Sat | \(1) | +| | abbreviated name. | (en_US); | | +| | || So, Mo, ..., Sa | | +| | | (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%A`` | Weekday as locale's full name. || Sunday, Monday, ..., | \(1) | +| | | Saturday (en_US); | | +| | || Sonntag, Montag, ..., | | +| | | Samstag (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%w`` | Weekday as a decimal number, | 0, 1, ..., 6 | | +| | where 0 is Sunday and 6 is | | | +| | Saturday. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%d`` | Day of the month as a | 01, 02, ..., 31 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%b`` | Month as locale's abbreviated || Jan, Feb, ..., Dec | \(1) | +| | name. | (en_US); | | +| | || Jan, Feb, ..., Dez | | +| | | (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%B`` | Month as locale's full name. || January, February, | \(1) | +| | | ..., December (en_US);| | +| | || Januar, Februar, ..., | | +| | | Dezember (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%m`` | Month as a zero-padded | 01, 02, ..., 12 | | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%y`` | Year without century as a | 00, 01, ..., 99 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%Y`` | Year with century as a decimal | 0001, 0002, ..., 2013, | \(2) | +| | number. | 2014, ...., 9998, 9999 | | ++-----------+--------------------------------+------------------------+-------+ +| ``%H`` | Hour (24-hour clock) as a | 00, 01, ..., 23 | | +| | zero-padded decimal number. 
| | | ++-----------+--------------------------------+------------------------+-------+ +| ``%I`` | Hour (12-hour clock) as a | 01, 02, ..., 12 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%p`` | Locale's equivalent of either || AM, PM (en_US); | \(1), | +| | AM or PM. || am, pm (de_DE) | \(3) | ++-----------+--------------------------------+------------------------+-------+ +| ``%M`` | Minute as a zero-padded | 00, 01, ..., 59 | | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%S`` | Second as a zero-padded | 00, 01, ..., 59 | \(4) | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%f`` | Microsecond as a decimal | 000000, 000001, ..., | \(5) | +| | number, zero-padded on the | 999999 | | +| | left. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%z`` | UTC offset in the form +HHMM | (empty), +0000, -0400, | \(6) | +| | or -HHMM (empty string if the | +1030 | | +| | the object is naive). | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%Z`` | Time zone name (empty string | (empty), UTC, EST, CST | | +| | if the object is naive). | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%j`` | Day of the year as a | 001, 002, ..., 366 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%U`` | Week number of the year | 00, 01, ..., 53 | \(7) | +| | (Sunday as the first day of | | | +| | the week) as a zero padded | | | +| | decimal number. All days in a | | | +| | new year preceding the first | | | +| | Sunday are considered to be in | | | +| | week 0. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%W`` | Week number of the year | 00, 01, ..., 53 | \(7) | +| | (Monday as the first day of | | | +| | the week) as a decimal number. | | | +| | All days in a new year | | | +| | preceding the first Monday | | | +| | are considered to be in | | | +| | week 0. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%c`` | Locale's appropriate date and || Tue Aug 16 21:30:00 | \(1) | +| | time representation. | 1988 (en_US); | | +| | || Di 16 Aug 21:30:00 | | +| | | 1988 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%x`` | Locale's appropriate date || 08/16/88 (None); | \(1) | +| | representation. || 08/16/1988 (en_US); | | +| | || 16.08.1988 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%X`` | Locale's appropriate time || 21:30:00 (en_US); | \(1) | +| | representation. || 21:30:00 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%%`` | A literal ``'%'`` character. | % | | ++-----------+--------------------------------+------------------------+-------+ Notes: (1) + Because the format depends on the current locale, care should be taken when + making assumptions about the output value. 
Field orderings will vary (for + example, "month/day/year" versus "day/month/year"), and the output may + contain Unicode characters encoded using the locale's default encoding (for + example, if the current locale is ``js_JP``, the default encoding could be + any one of ``eucJP``, ``SJIS``, or ``utf-8``; use :meth:`locale.getlocale` + to determine the current locale's encoding). + +(2) + The :meth:`strptime` method can parse years in the full [1, 9999] range, but + years < 1000 must be zero-filled to 4-digit width. + + .. versionchanged:: 3.2 + In previous versions, :meth:`strftime` method was restricted to + years >= 1900. + + .. versionchanged:: 3.3 + In version 3.2, :meth:`strftime` method was restricted to + years >= 1000. + +(3) + When used with the :meth:`strptime` method, the ``%p`` directive only affects + the output hour field if the ``%I`` directive is used to parse the hour. + +(4) + Unlike the :mod:`time` module, the :mod:`datetime` module does not support + leap seconds. + +(5) When used with the :meth:`strptime` method, the ``%f`` directive accepts from one to six digits and zero pads on the right. ``%f`` is an extension to the set of format characters in the C standard (but implemented separately in datetime objects, and therefore always available). -(2) - When used with the :meth:`strptime` method, the ``%p`` directive only affects - the output hour field if the ``%I`` directive is used to parse the hour. - -(3) - Unlike :mod:`time` module, :mod:`datetime` module does not support - leap seconds. - -(4) - When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used in - calculations when the day of the week and the year are specified. - -(5) - The :meth:`strptime` method can - parse years in the full [1, 9999] range, but years < 1000 must be - zero-filled to 4-digit width. +(6) + For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty + strings. + + For an aware object: + + ``%z`` + :meth:`utcoffset` is transformed into a 5-character string of the form + +HHMM or -HHMM, where HH is a 2-digit string giving the number of UTC + offset hours, and MM is a 2-digit string giving the number of UTC offset + minutes. For example, if :meth:`utcoffset` returns + ``timedelta(hours=-3, minutes=-30)``, ``%z`` is replaced with the string + ``'-0330'``. + + ``%Z`` + If :meth:`tzname` returns ``None``, ``%Z`` is replaced by an empty + string. Otherwise ``%Z`` is replaced by the returned value, which must + be a string. .. versionchanged:: 3.2 - In previous versions, :meth:`strftime` method was restricted to - years >= 1900. - - .. versionchanged:: 3.3 - In version 3.2, :meth:`strftime` method was restricted to - years >= 1000. - -(6) - For example, if :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``, - ``%z`` is replaced with the string ``'-0330'``. - -.. versionchanged:: 3.2 - When the ``%z`` directive is provided to the :meth:`strptime` method, an - aware :class:`.datetime` object will be produced. The ``tzinfo`` of the - result will be set to a :class:`timezone` instance. + When the ``%z`` directive is provided to the :meth:`strptime` method, an + aware :class:`.datetime` object will be produced. The ``tzinfo`` of the + result will be set to a :class:`timezone` instance. + +(7) + When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used + in calculations when the day of the week and the year are specified. .. 
rubric:: Footnotes diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -298,6 +298,8 @@ Documentation ------------- +- Issue #17701: Improving strftime documentation. + - Issue #18440: Clarify that `hash()` can truncate the value returned from an object's custom `__hash__()` method. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 12 23:15:54 2013 From: python-checkins at python.org (david.wolever) Date: Mon, 12 Aug 2013 23:15:54 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_issue_=2317701=3A_Improving_strftime_documentation?= =?utf-8?q?=2E?= Message-ID: <3cDVGL6lhlzSjg@mail.python.org> http://hg.python.org/cpython/rev/ab550dac6209 changeset: 85142:ab550dac6209 parent: 85140:c27ec198d3d1 parent: 85141:1d4b02d8fa8a user: David Wolever date: Mon Aug 12 17:15:36 2013 -0400 summary: Merge issue #17701: Improving strftime documentation. files: Doc/library/datetime.rst | 314 ++++++++++++++------------ Misc/NEWS | 2 + 2 files changed, 173 insertions(+), 143 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -595,15 +595,17 @@ .. method:: date.strftime(format) Return a string representing the date, controlled by an explicit format string. - Format codes referring to hours, minutes or seconds will see 0 values. See - section :ref:`strftime-strptime-behavior`. + Format codes referring to hours, minutes or seconds will see 0 values. For a + complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. .. method:: date.__format__(format) Same as :meth:`.date.strftime`. This makes it possible to specify format - string for a :class:`.date` object when using :meth:`str.format`. - See section :ref:`strftime-strptime-behavior`. + string for a :class:`.date` object when using :meth:`str.format`. For a + complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. Example of counting days to an event:: @@ -795,7 +797,8 @@ *format*. This is equivalent to ``datetime(*(time.strptime(date_string, format)[0:6]))``. :exc:`ValueError` is raised if the date_string and format can't be parsed by :func:`time.strptime` or if it returns a value which isn't a - time tuple. See section :ref:`strftime-strptime-behavior`. + time tuple. For a complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. @@ -1162,14 +1165,16 @@ .. method:: datetime.strftime(format) Return a string representing the date and time, controlled by an explicit format - string. See section :ref:`strftime-strptime-behavior`. + string. For a complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. .. method:: datetime.__format__(format) Same as :meth:`.datetime.strftime`. This makes it possible to specify format - string for a :class:`.datetime` object when using :meth:`str.format`. - See section :ref:`strftime-strptime-behavior`. + string for a :class:`.datetime` object when using :meth:`str.format`. For a + complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. Examples of working with datetime objects: @@ -1401,15 +1406,17 @@ .. method:: time.strftime(format) - Return a string representing the time, controlled by an explicit format string. - See section :ref:`strftime-strptime-behavior`. + Return a string representing the time, controlled by an explicit format + string. 
For a complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. .. method:: time.__format__(format) Same as :meth:`.time.strftime`. This makes it possible to specify format string - for a :class:`.time` object when using :meth:`str.format`. - See section :ref:`strftime-strptime-behavior`. + for a :class:`.time` object when using :meth:`str.format`. For a + complete list of formatting directives, see + :ref:`strftime-strptime-behavior`. .. method:: time.utcoffset() @@ -1775,22 +1782,6 @@ microseconds should not be used, as :class:`date` objects have no such values. If they're used anyway, ``0`` is substituted for them. -For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty -strings. - -For an aware object: - -``%z`` - :meth:`utcoffset` is transformed into a 5-character string of the form +HHMM or - -HHMM, where HH is a 2-digit string giving the number of UTC offset hours, and - MM is a 2-digit string giving the number of UTC offset minutes. For example, if - :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``, ``%z`` is - replaced with the string ``'-0330'``. - -``%Z`` - If :meth:`tzname` returns ``None``, ``%Z`` is replaced by an empty string. - Otherwise ``%Z`` is replaced by the returned value, which must be a string. - The full set of format codes supported varies across platforms, because Python calls the platform C library's :func:`strftime` function, and platform variations are common. @@ -1800,133 +1791,170 @@ implementation. Note that the 1999 version of the C standard added additional format codes. -+-----------+--------------------------------+-------+ -| Directive | Meaning | Notes | -+===========+================================+=======+ -| ``%a`` | Locale's abbreviated weekday | | -| | name. | | -+-----------+--------------------------------+-------+ -| ``%A`` | Locale's full weekday name. | | -+-----------+--------------------------------+-------+ -| ``%b`` | Locale's abbreviated month | | -| | name. | | -+-----------+--------------------------------+-------+ -| ``%B`` | Locale's full month name. | | -+-----------+--------------------------------+-------+ -| ``%c`` | Locale's appropriate date and | | -| | time representation. | | -+-----------+--------------------------------+-------+ -| ``%d`` | Day of the month as a decimal | | -| | number [01,31]. | | -+-----------+--------------------------------+-------+ -| ``%f`` | Microsecond as a decimal | \(1) | -| | number [0,999999], zero-padded | | -| | on the left | | -+-----------+--------------------------------+-------+ -| ``%H`` | Hour (24-hour clock) as a | | -| | decimal number [00,23]. | | -+-----------+--------------------------------+-------+ -| ``%I`` | Hour (12-hour clock) as a | | -| | decimal number [01,12]. | | -+-----------+--------------------------------+-------+ -| ``%j`` | Day of the year as a decimal | | -| | number [001,366]. | | -+-----------+--------------------------------+-------+ -| ``%m`` | Month as a decimal number | | -| | [01,12]. | | -+-----------+--------------------------------+-------+ -| ``%M`` | Minute as a decimal number | | -| | [00,59]. | | -+-----------+--------------------------------+-------+ -| ``%p`` | Locale's equivalent of either | \(2) | -| | AM or PM. | | -+-----------+--------------------------------+-------+ -| ``%S`` | Second as a decimal number | \(3) | -| | [00,59]. 
| | -+-----------+--------------------------------+-------+ -| ``%U`` | Week number of the year | \(4) | -| | (Sunday as the first day of | | -| | the week) as a decimal number | | -| | [00,53]. All days in a new | | -| | year preceding the first | | -| | Sunday are considered to be in | | -| | week 0. | | -+-----------+--------------------------------+-------+ -| ``%w`` | Weekday as a decimal number | | -| | [0(Sunday),6]. | | -+-----------+--------------------------------+-------+ -| ``%W`` | Week number of the year | \(4) | -| | (Monday as the first day of | | -| | the week) as a decimal number | | -| | [00,53]. All days in a new | | -| | year preceding the first | | -| | Monday are considered to be in | | -| | week 0. | | -+-----------+--------------------------------+-------+ -| ``%x`` | Locale's appropriate date | | -| | representation. | | -+-----------+--------------------------------+-------+ -| ``%X`` | Locale's appropriate time | | -| | representation. | | -+-----------+--------------------------------+-------+ -| ``%y`` | Year without century as a | | -| | decimal number [00,99]. | | -+-----------+--------------------------------+-------+ -| ``%Y`` | Year with century as a decimal | \(5) | -| | number [0001,9999]. | | -+-----------+--------------------------------+-------+ -| ``%z`` | UTC offset in the form +HHMM | \(6) | -| | or -HHMM (empty string if the | | -| | the object is naive). | | -+-----------+--------------------------------+-------+ -| ``%Z`` | Time zone name (empty string | | -| | if the object is naive). | | -+-----------+--------------------------------+-------+ -| ``%%`` | A literal ``'%'`` character. | | -+-----------+--------------------------------+-------+ ++-----------+--------------------------------+------------------------+-------+ +| Directive | Meaning | Example | Notes | ++===========+================================+========================+=======+ +| ``%a`` | Weekday as locale's || Sun, Mon, ..., Sat | \(1) | +| | abbreviated name. | (en_US); | | +| | || So, Mo, ..., Sa | | +| | | (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%A`` | Weekday as locale's full name. || Sunday, Monday, ..., | \(1) | +| | | Saturday (en_US); | | +| | || Sonntag, Montag, ..., | | +| | | Samstag (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%w`` | Weekday as a decimal number, | 0, 1, ..., 6 | | +| | where 0 is Sunday and 6 is | | | +| | Saturday. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%d`` | Day of the month as a | 01, 02, ..., 31 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%b`` | Month as locale's abbreviated || Jan, Feb, ..., Dec | \(1) | +| | name. | (en_US); | | +| | || Jan, Feb, ..., Dez | | +| | | (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%B`` | Month as locale's full name. || January, February, | \(1) | +| | | ..., December (en_US);| | +| | || Januar, Februar, ..., | | +| | | Dezember (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%m`` | Month as a zero-padded | 01, 02, ..., 12 | | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%y`` | Year without century as a | 00, 01, ..., 99 | | +| | zero-padded decimal number. 
| | | ++-----------+--------------------------------+------------------------+-------+ +| ``%Y`` | Year with century as a decimal | 0001, 0002, ..., 2013, | \(2) | +| | number. | 2014, ...., 9998, 9999 | | ++-----------+--------------------------------+------------------------+-------+ +| ``%H`` | Hour (24-hour clock) as a | 00, 01, ..., 23 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%I`` | Hour (12-hour clock) as a | 01, 02, ..., 12 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%p`` | Locale's equivalent of either || AM, PM (en_US); | \(1), | +| | AM or PM. || am, pm (de_DE) | \(3) | ++-----------+--------------------------------+------------------------+-------+ +| ``%M`` | Minute as a zero-padded | 00, 01, ..., 59 | | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%S`` | Second as a zero-padded | 00, 01, ..., 59 | \(4) | +| | decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%f`` | Microsecond as a decimal | 000000, 000001, ..., | \(5) | +| | number, zero-padded on the | 999999 | | +| | left. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%z`` | UTC offset in the form +HHMM | (empty), +0000, -0400, | \(6) | +| | or -HHMM (empty string if the | +1030 | | +| | the object is naive). | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%Z`` | Time zone name (empty string | (empty), UTC, EST, CST | | +| | if the object is naive). | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%j`` | Day of the year as a | 001, 002, ..., 366 | | +| | zero-padded decimal number. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%U`` | Week number of the year | 00, 01, ..., 53 | \(7) | +| | (Sunday as the first day of | | | +| | the week) as a zero padded | | | +| | decimal number. All days in a | | | +| | new year preceding the first | | | +| | Sunday are considered to be in | | | +| | week 0. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%W`` | Week number of the year | 00, 01, ..., 53 | \(7) | +| | (Monday as the first day of | | | +| | the week) as a decimal number. | | | +| | All days in a new year | | | +| | preceding the first Monday | | | +| | are considered to be in | | | +| | week 0. | | | ++-----------+--------------------------------+------------------------+-------+ +| ``%c`` | Locale's appropriate date and || Tue Aug 16 21:30:00 | \(1) | +| | time representation. | 1988 (en_US); | | +| | || Di 16 Aug 21:30:00 | | +| | | 1988 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%x`` | Locale's appropriate date || 08/16/88 (None); | \(1) | +| | representation. || 08/16/1988 (en_US); | | +| | || 16.08.1988 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%X`` | Locale's appropriate time || 21:30:00 (en_US); | \(1) | +| | representation. || 21:30:00 (de_DE) | | ++-----------+--------------------------------+------------------------+-------+ +| ``%%`` | A literal ``'%'`` character. 
| % | | ++-----------+--------------------------------+------------------------+-------+ Notes: (1) + Because the format depends on the current locale, care should be taken when + making assumptions about the output value. Field orderings will vary (for + example, "month/day/year" versus "day/month/year"), and the output may + contain Unicode characters encoded using the locale's default encoding (for + example, if the current locale is ``js_JP``, the default encoding could be + any one of ``eucJP``, ``SJIS``, or ``utf-8``; use :meth:`locale.getlocale` + to determine the current locale's encoding). + +(2) + The :meth:`strptime` method can parse years in the full [1, 9999] range, but + years < 1000 must be zero-filled to 4-digit width. + + .. versionchanged:: 3.2 + In previous versions, :meth:`strftime` method was restricted to + years >= 1900. + + .. versionchanged:: 3.3 + In version 3.2, :meth:`strftime` method was restricted to + years >= 1000. + +(3) + When used with the :meth:`strptime` method, the ``%p`` directive only affects + the output hour field if the ``%I`` directive is used to parse the hour. + +(4) + Unlike the :mod:`time` module, the :mod:`datetime` module does not support + leap seconds. + +(5) When used with the :meth:`strptime` method, the ``%f`` directive accepts from one to six digits and zero pads on the right. ``%f`` is an extension to the set of format characters in the C standard (but implemented separately in datetime objects, and therefore always available). -(2) - When used with the :meth:`strptime` method, the ``%p`` directive only affects - the output hour field if the ``%I`` directive is used to parse the hour. - -(3) - Unlike :mod:`time` module, :mod:`datetime` module does not support - leap seconds. - -(4) - When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used in - calculations when the day of the week and the year are specified. - -(5) - The :meth:`strptime` method can - parse years in the full [1, 9999] range, but years < 1000 must be - zero-filled to 4-digit width. +(6) + For a naive object, the ``%z`` and ``%Z`` format codes are replaced by empty + strings. + + For an aware object: + + ``%z`` + :meth:`utcoffset` is transformed into a 5-character string of the form + +HHMM or -HHMM, where HH is a 2-digit string giving the number of UTC + offset hours, and MM is a 2-digit string giving the number of UTC offset + minutes. For example, if :meth:`utcoffset` returns + ``timedelta(hours=-3, minutes=-30)``, ``%z`` is replaced with the string + ``'-0330'``. + + ``%Z`` + If :meth:`tzname` returns ``None``, ``%Z`` is replaced by an empty + string. Otherwise ``%Z`` is replaced by the returned value, which must + be a string. .. versionchanged:: 3.2 - In previous versions, :meth:`strftime` method was restricted to - years >= 1900. - - .. versionchanged:: 3.3 - In version 3.2, :meth:`strftime` method was restricted to - years >= 1000. - -(6) - For example, if :meth:`utcoffset` returns ``timedelta(hours=-3, minutes=-30)``, - ``%z`` is replaced with the string ``'-0330'``. - -.. versionchanged:: 3.2 - When the ``%z`` directive is provided to the :meth:`strptime` method, an - aware :class:`.datetime` object will be produced. The ``tzinfo`` of the - result will be set to a :class:`timezone` instance. + When the ``%z`` directive is provided to the :meth:`strptime` method, an + aware :class:`.datetime` object will be produced. The ``tzinfo`` of the + result will be set to a :class:`timezone` instance. 
+ +(7) + When used with the :meth:`strptime` method, ``%U`` and ``%W`` are only used + in calculations when the day of the week and the year are specified. .. rubric:: Footnotes diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -750,6 +750,8 @@ Documentation ------------- +- Issue #17701: Improving strftime documentation. + - Issue #18440: Clarify that `hash()` can truncate the value returned from an object's custom `__hash__()` method. -- Repository URL: http://hg.python.org/cpython From ezio.melotti at gmail.com Mon Aug 12 23:42:25 2013 From: ezio.melotti at gmail.com (Ezio Melotti) Date: Tue, 13 Aug 2013 00:42:25 +0300 Subject: [Python-checkins] cpython (merge 2.7 -> 2.7): Clean merge In-Reply-To: <3cDSP02w6Qz7LjM@mail.python.org> References: <3cDSP02w6Qz7LjM@mail.python.org> Message-ID: Hi, On Mon, Aug 12, 2013 at 10:51 PM, david.wolever wrote: > http://hg.python.org/cpython/rev/0f4d971b0cee > changeset: 85138:0f4d971b0cee > branch: 2.7 > parent: 85137:102b3e257dca > parent: 83899:ef037ad304c1 > user: David Wolever > date: Thu May 23 17:51:58 2013 -0400 > summary: > Clean merge > > files: > .hgtags | 1 + > Doc/c-api/exceptions.rst | 38 +- > Doc/c-api/intro.rst | 4 +- > Doc/faq/design.rst | 4 +- > Doc/faq/programming.rst | 86 + > Doc/glossary.rst | 8 + > Doc/howto/advocacy.rst | 355 ------- > Doc/howto/index.rst | 1 - > Doc/howto/sockets.rst | 8 +- > Doc/howto/urllib2.rst | 12 +- > Doc/library/codecs.rst | 172 ++- > Doc/library/collections.rst | 4 +- > Doc/library/compileall.rst | 2 +- > Doc/library/ctypes.rst | 2 +- > Doc/library/io.rst | 3 + > Doc/library/itertools.rst | 4 +- > Doc/library/numbers.rst | 8 +- > Doc/library/operator.rst | 47 +- > Doc/library/resource.rst | 21 +- > Doc/library/socket.rst | 16 +- > Doc/library/ssl.rst | 16 +- > Doc/library/stdtypes.rst | 28 +- > Doc/library/string.rst | 5 +- > Doc/library/unittest.rst | 2 + > Doc/library/urllib.rst | 7 + > Doc/library/urllib2.rst | 15 +- > Doc/reference/datamodel.rst | 9 +- > Doc/reference/expressions.rst | 15 +- > Doc/reference/simple_stmts.rst | 3 + > Doc/tutorial/inputoutput.rst | 23 +- > Doc/tutorial/modules.rst | 7 +- > Doc/using/mac.rst | 14 +- > Include/object.h | 16 +- > Include/patchlevel.h | 4 +- > Lib/_weakrefset.py | 6 + > Lib/collections.py | 2 - > Lib/ctypes/test/__init__.py | 2 +- > Lib/ctypes/test/test_wintypes.py | 43 + > Lib/ctypes/util.py | 2 +- > Lib/distutils/__init__.py | 2 +- > Lib/filecmp.py | 2 +- > Lib/gzip.py | 69 +- > Lib/idlelib/Bindings.py | 4 + > Lib/idlelib/EditorWindow.py | 31 +- > Lib/idlelib/PyShell.py | 1 - > Lib/idlelib/help.txt | 3 +- > Lib/idlelib/idlever.py | 2 +- > Lib/idlelib/run.py | 5 + > Lib/logging/handlers.py | 36 +- > Lib/mimetypes.py | 2 + > Lib/multiprocessing/pool.py | 2 + > Lib/multiprocessing/synchronize.py | 2 +- > Lib/multiprocessing/util.py | 5 +- > Lib/pickle.py | 2 +- > Lib/plistlib.py | 4 +- > Lib/pydoc_data/topics.py | 18 +- > Lib/sre_parse.py | 6 +- > Lib/ssl.py | 26 +- > Lib/tarfile.py | 12 +- > Lib/test/pickletester.py | 2 + > Lib/test/test_base64.py | 26 + > Lib/test/test_bz2.py | 31 +- > Lib/test/test_collections.py | 2 +- > Lib/test/test_dictviews.py | 5 + > Lib/test/test_gdb.py | 46 +- > Lib/test/test_gzip.py | 17 - > Lib/test/test_io.py | 4 +- > Lib/test/test_mimetypes.py | 2 + > Lib/test/test_multiprocessing.py | 32 +- > Lib/test/test_plistlib.py | 12 + > Lib/test/test_pydoc.py | 57 +- > Lib/test/test_re.py | 11 + > Lib/test/test_sax.py | 20 + > Lib/test/test_support.py | 9 + > Lib/test/test_tarfile.py | 8 + > Lib/test/test_tcl.py | 
18 +- > Lib/test/test_weakset.py | 6 + > Lib/test/test_winreg.py | 12 +- > Lib/test/test_zipfile.py | 10 +- > Lib/test/testbz2_bigmem.bz2 | Bin > Lib/threading.py | 42 +- > Lib/xml/sax/saxutils.py | 8 +- > Misc/ACKS | 9 + > Misc/NEWS | 457 ++++++--- > Misc/RPM/python-2.7.spec | 2 +- > Modules/_ctypes/libffi/src/dlmalloc.c | 5 + > Modules/_multiprocessing/multiprocessing.c | 2 +- > Modules/_sqlite/cursor.c | 2 +- > Modules/_sqlite/util.c | 8 +- > Modules/_sqlite/util.h | 4 +- > Modules/_testcapimodule.c | 2 +- > Modules/cPickle.c | 10 +- > Modules/dbmmodule.c | 8 +- > Modules/operator.c | 14 +- > Modules/readline.c | 27 +- > Modules/selectmodule.c | 35 +- > Modules/signalmodule.c | 14 +- > Modules/sre.h | 4 +- > Objects/dictobject.c | 4 + > PCbuild/rt.bat | 4 +- > README | 2 +- > Tools/scripts/gprof2html.py | 2 +- > configure | 2 +- > configure.ac | 2 +- > setup.py | 8 +- > 105 files changed, 1301 insertions(+), 955 deletions(-) > > To avoid these big merges you can do: # check the two heads that you are going to merge and their csids hg heads . # update to the other head (the one you pulled, not the one you committed) hg up csid-of-the-other-head # merge your changes on with the ones you pulled hg merge This will merge the changes you just committed with the ones you pulled, and result in a shorter diff that is easier to read/review/merge. Otherwise pulling and updating before committing will avoid the problem entirely (unless you end up in a push-race). Best Regards, Ezio Melotti From solipsis at pitrou.net Mon Aug 12 23:50:30 2013 From: solipsis at pitrou.net (Antoine Pitrou) Date: Mon, 12 Aug 2013 23:50:30 +0200 Subject: [Python-checkins] cpython (merge 2.7 -> 2.7): Clean merge References: <3cDSP02w6Qz7LjM@mail.python.org> Message-ID: <20130812235030.3ed3ec73@fsol> On Tue, 13 Aug 2013 00:42:25 +0300 Ezio Melotti wrote: > > To avoid these big merges you can do: > # check the two heads that you are going to merge and their csids > hg heads . > # update to the other head (the one you pulled, not the one you committed) > hg up csid-of-the-other-head > # merge your changes on with the ones you pulled > hg merge > > This will merge the changes you just committed with the ones you > pulled, and result in a shorter diff that is easier to > read/review/merge. > Otherwise pulling and updating before committing will avoid the > problem entirely (unless you end up in a push-race). Or, if you are working on a single branch and no-one is watching you, you can do "hg pull --rebase". Regards Antoine. From python-checkins at python.org Tue Aug 13 00:13:39 2013 From: python-checkins at python.org (david.wolever) Date: Tue, 13 Aug 2013 00:13:39 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Documenting_that_json=2Elo?= =?utf-8?q?ad_may_raise_a_ValueError=2E?= Message-ID: <3cDWXz37VVzNRr@mail.python.org> http://hg.python.org/cpython/rev/f11683963558 changeset: 85143:f11683963558 user: Felix Crux date: Mon Aug 12 17:39:51 2013 -0400 summary: Documenting that json.load may raise a ValueError. Issue #18680: JSONDecoder should document that it raises a ValueError for malformed data files: Doc/library/json.rst | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Doc/library/json.rst b/Doc/library/json.rst --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -245,6 +245,8 @@ kwarg; otherwise :class:`JSONDecoder` is used. Additional keyword arguments will be passed to the constructor of the class. 
+ If the data being deserialized is not a valid JSON document, a + :exc:`ValueError` will be raised. .. function:: loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 00:13:40 2013 From: python-checkins at python.org (david.wolever) Date: Tue, 13 Aug 2013 00:13:40 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Documenting_that_json=2Elo?= =?utf-8?q?ad_may_raise_a_ValueError=2E?= Message-ID: <3cDWY052YpzRkL@mail.python.org> http://hg.python.org/cpython/rev/905fad4cb40a changeset: 85144:905fad4cb40a user: Felix Crux date: Mon Aug 12 17:39:51 2013 -0400 summary: Documenting that json.load may raise a ValueError. Issue #18680: JSONDecoder should document that it raises a ValueError for malformed data files: Doc/library/json.rst | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Doc/library/json.rst b/Doc/library/json.rst --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -256,6 +256,8 @@ The other arguments have the same meaning as in :func:`load`, except *encoding* which is ignored and deprecated. + If the data being deserialized is not a valid JSON document, a + :exc:`ValueError` will be raised. Encoders and Decoders --------------------- -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 00:13:41 2013 From: python-checkins at python.org (david.wolever) Date: Tue, 13 Aug 2013 00:13:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Documenting_that_json=2Elo?= =?utf-8?q?ad_may_raise_a_ValueError=2E?= Message-ID: <3cDWY16prVzS4D@mail.python.org> http://hg.python.org/cpython/rev/7ba77d77b499 changeset: 85145:7ba77d77b499 user: Felix Crux date: Mon Aug 12 17:39:51 2013 -0400 summary: Documenting that json.load may raise a ValueError. Issue #18680: JSONDecoder should document that it raises a ValueError for malformed data files: Doc/library/json.rst | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Doc/library/json.rst b/Doc/library/json.rst --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -329,6 +329,8 @@ those with character codes in the 0-31 range, including ``'\t'`` (tab), ``'\n'``, ``'\r'`` and ``'\0'``. + If the data being deserialized is not a valid JSON document, a + :exc:`ValueError` will be raised. .. method:: decode(s) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 00:28:27 2013 From: python-checkins at python.org (david.wolever) Date: Tue, 13 Aug 2013 00:28:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Documenting_th?= =?utf-8?q?at_json=2Eload_may_raise_a_ValueError=2E?= Message-ID: <3cDWt33gffzRk2@mail.python.org> http://hg.python.org/cpython/rev/f8cf0f997dfa changeset: 85146:f8cf0f997dfa branch: 3.3 parent: 85141:1d4b02d8fa8a user: Felix Crux date: Mon Aug 12 17:39:51 2013 -0400 summary: Documenting that json.load may raise a ValueError. Issue #18680: JSONDecoder should document that it raises a ValueError for malformed data files: Doc/library/json.rst | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Doc/library/json.rst b/Doc/library/json.rst --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -328,6 +328,8 @@ those with character codes in the 0-31 range, including ``'\t'`` (tab), ``'\n'``, ``'\r'`` and ``'\0'``. 
+ If the data being deserialized is not a valid JSON document, a + :exc:`ValueError` will be raised. .. method:: decode(s) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 00:28:28 2013 From: python-checkins at python.org (david.wolever) Date: Tue, 13 Aug 2013 00:28:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Documenting_th?= =?utf-8?q?at_json=2Eload_may_raise_a_ValueError=2E?= Message-ID: <3cDWt45dktzSGY@mail.python.org> http://hg.python.org/cpython/rev/524693d62093 changeset: 85147:524693d62093 branch: 2.7 parent: 85139:adbc9789a5e4 user: Felix Crux date: Mon Aug 12 17:39:51 2013 -0400 summary: Documenting that json.load may raise a ValueError. Issue #18680: JSONDecoder should document that it raises a ValueError for malformed data files: Doc/library/json.rst | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Doc/library/json.rst b/Doc/library/json.rst --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -348,6 +348,8 @@ those with character codes in the 0-31 range, including ``'\t'`` (tab), ``'\n'``, ``'\r'`` and ``'\0'``. + If the data being deserialized is not a valid JSON document, a + :exc:`ValueError` will be raised. .. method:: decode(s) -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Tue Aug 13 05:49:17 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 13 Aug 2013 05:49:17 +0200 Subject: [Python-checkins] Daily reference leaks (7ba77d77b499): sum=0 Message-ID: results for 7ba77d77b499 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogAn83ji', '-x'] From python-checkins at python.org Tue Aug 13 07:26:23 2013 From: python-checkins at python.org (senthil.kumaran) Date: Tue, 13 Aug 2013 07:26:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Increasing_tes?= =?utf-8?q?t_coverage_of_ftplib=2E_Patch_by_Muhammad_Jehanzeb?= Message-ID: <3cDj8H20MxzSrn@mail.python.org> http://hg.python.org/cpython/rev/6816ae6c49ce changeset: 85148:6816ae6c49ce branch: 2.7 user: Senthil Kumaran date: Mon Aug 12 22:24:43 2013 -0700 summary: Increasing test coverage of ftplib. Patch by Muhammad Jehanzeb files: Lib/test/test_ftplib.py | 8 ++++++++ 1 files changed, 8 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -474,6 +474,14 @@ def test_rmd(self): self.client.rmd('foo') + def test_cwd(self): + dir = self.client.cwd('/foo') + self.assertEqual(dir, '250 cwd ok') + + def test_mkd(self): + dir = self.client.mkd('/foo') + self.assertEqual(dir, '/foo') + def test_pwd(self): dir = self.client.pwd() self.assertEqual(dir, 'pwd ok') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 07:26:24 2013 From: python-checkins at python.org (senthil.kumaran) Date: Tue, 13 Aug 2013 07:26:24 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Increasing_tes?= =?utf-8?q?t_coverage_of_ftplib=2E_Patch_by_Muhammad_Jehanzeb?= Message-ID: <3cDj8J3x0Kz7LjM@mail.python.org> http://hg.python.org/cpython/rev/68b599740aa0 changeset: 85149:68b599740aa0 branch: 3.3 parent: 85146:f8cf0f997dfa user: Senthil Kumaran date: Mon Aug 12 22:25:27 2013 -0700 summary: Increasing test coverage of ftplib. 
Patch by Muhammad Jehanzeb files: Lib/test/test_ftplib.py | 8 ++++++++ 1 files changed, 8 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -529,6 +529,14 @@ def test_rmd(self): self.client.rmd('foo') + def test_cwd(self): + dir = self.client.cwd('/foo') + self.assertEqual(dir, '250 cwd ok') + + def test_mkd(self): + dir = self.client.mkd('/foo') + self.assertEqual(dir, '/foo') + def test_pwd(self): dir = self.client.pwd() self.assertEqual(dir, 'pwd ok') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 07:26:25 2013 From: python-checkins at python.org (senthil.kumaran) Date: Tue, 13 Aug 2013 07:26:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_merge_from_3=2E3?= Message-ID: <3cDj8K5pyJz7LjN@mail.python.org> http://hg.python.org/cpython/rev/f8942b8e6774 changeset: 85150:f8942b8e6774 parent: 85145:7ba77d77b499 parent: 85149:68b599740aa0 user: Senthil Kumaran date: Mon Aug 12 22:26:14 2013 -0700 summary: merge from 3.3 Increasing test coverage of ftplib. Patch by Muhammad Jehanzeb files: Lib/test/test_ftplib.py | 8 ++++++++ 1 files changed, 8 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -530,6 +530,14 @@ def test_rmd(self): self.client.rmd('foo') + def test_cwd(self): + dir = self.client.cwd('/foo') + self.assertEqual(dir, '250 cwd ok') + + def test_mkd(self): + dir = self.client.mkd('/foo') + self.assertEqual(dir, '/foo') + def test_pwd(self): dir = self.client.pwd() self.assertEqual(dir, 'pwd ok') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 10:16:22 2013 From: python-checkins at python.org (ned.deily) Date: Tue, 13 Aug 2013 10:16:22 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgMTg3MjQ6?= =?utf-8?q?_Fix_typo_noticed_by_Susan_Tan=2E?= Message-ID: <3cDmwQ33FWzMxN@mail.python.org> http://hg.python.org/cpython/rev/bd030e70cecb changeset: 85151:bd030e70cecb branch: 2.7 parent: 85148:6816ae6c49ce user: Ned Deily date: Tue Aug 13 01:11:56 2013 -0700 summary: Issue 18724: Fix typo noticed by Susan Tan. files: Doc/library/smtplib.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -79,7 +79,7 @@ .. exception:: SMTPException - The base exception class for all the other excpetions provided by this + The base exception class for all the other exceptions provided by this module. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 10:16:23 2013 From: python-checkins at python.org (ned.deily) Date: Tue, 13 Aug 2013 10:16:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgMTg3MjQ6?= =?utf-8?q?_Fix_typo_noticed_by_Susan_Tan=2E?= Message-ID: <3cDmwR4rxFzMxN@mail.python.org> http://hg.python.org/cpython/rev/76d71bc949b6 changeset: 85152:76d71bc949b6 branch: 3.3 parent: 85149:68b599740aa0 user: Ned Deily date: Tue Aug 13 01:12:41 2013 -0700 summary: Issue 18724: Fix typo noticed by Susan Tan. 
files: Doc/library/smtplib.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -110,7 +110,7 @@ .. exception:: SMTPException - The base exception class for all the other excpetions provided by this + The base exception class for all the other exceptions provided by this module. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 10:16:24 2013 From: python-checkins at python.org (ned.deily) Date: Tue, 13 Aug 2013 10:16:24 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_18724=3A_Fix_typo_noticed_by_Susan_Tan=2E?= Message-ID: <3cDmwS6qtNzRqd@mail.python.org> http://hg.python.org/cpython/rev/8dccd3b51711 changeset: 85153:8dccd3b51711 parent: 85150:f8942b8e6774 parent: 85152:76d71bc949b6 user: Ned Deily date: Tue Aug 13 01:15:14 2013 -0700 summary: Issue 18724: Fix typo noticed by Susan Tan. files: Doc/library/smtplib.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -111,7 +111,7 @@ .. exception:: SMTPException Subclass of :exc:`OSError` that is the base exception class for all - the other excpetions provided by this module. + the other exceptions provided by this module. .. exception:: SMTPServerDisconnected -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 11:54:52 2013 From: python-checkins at python.org (ezio.melotti) Date: Tue, 13 Aug 2013 11:54:52 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogIzE4Njg3OiByZW1v?= =?utf-8?q?ve_obsolete_comment=2E__Patch_by_F=C3=A9vry_Thibault=2E?= Message-ID: <3cDq644sgwz7LjP@mail.python.org> http://hg.python.org/cpython/rev/49edf4cbc453 changeset: 85154:49edf4cbc453 branch: 2.7 parent: 85151:bd030e70cecb user: Ezio Melotti date: Tue Aug 13 12:53:40 2013 +0300 summary: #18687: remove obsolete comment. Patch by Févry Thibault.
files: Lib/test/leakers/test_ctypes.py | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Lib/test/leakers/test_ctypes.py b/Lib/test/leakers/test_ctypes.py --- a/Lib/test/leakers/test_ctypes.py +++ b/Lib/test/leakers/test_ctypes.py @@ -1,6 +1,5 @@ # Taken from Lib/ctypes/test/test_keeprefs.py, PointerToStructure.test(). -# When this leak is fixed, remember to remove from Misc/build.sh LEAKY_TESTS. from ctypes import Structure, c_int, POINTER import gc -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 11:54:55 2013 From: python-checkins at python.org (ezio.melotti) Date: Tue, 13 Aug 2013 11:54:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4Njg3OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cDq671Rwbz7Ljm@mail.python.org> http://hg.python.org/cpython/rev/1ea89e5f40cf changeset: 85156:1ea89e5f40cf parent: 85153:8dccd3b51711 parent: 85155:49fa63610c7f user: Ezio Melotti date: Tue Aug 13 12:54:29 2013 +0300 summary: #18687: merge with 3.3. files: Lib/test/leakers/test_ctypes.py | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Lib/test/leakers/test_ctypes.py b/Lib/test/leakers/test_ctypes.py --- a/Lib/test/leakers/test_ctypes.py +++ b/Lib/test/leakers/test_ctypes.py @@ -1,6 +1,5 @@ # Taken from Lib/ctypes/test/test_keeprefs.py, PointerToStructure.test(). -# When this leak is fixed, remember to remove from Misc/build.sh LEAKY_TESTS. from ctypes import Structure, c_int, POINTER import gc -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 18:54:48 2013 From: python-checkins at python.org (brett.cannon) Date: Tue, 13 Aug 2013 18:54:48 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?devinabox=3A_Suggest_PyPy3_or_install?= =?utf-8?q?ed_Python_3_interpreter_when_generating_the_HTML?= Message-ID: <3cF0Qc5CZhz7Ljg@mail.python.org> http://hg.python.org/devinabox/rev/8ed422a31d00 changeset: 56:8ed422a31d00 user: Brett Cannon date: Tue Aug 13 12:54:43 2013 -0400 summary: Suggest PyPy3 or installed Python 3 interpreter when generating the HTML coverage report. files: README | 11 +++-------- 1 files changed, 3 insertions(+), 8 deletions(-) diff --git a/README b/README --- a/README +++ b/README @@ -126,7 +126,7 @@ #. Extract setuptools and coverage: ``tar -x -f setuptools-*.tar.gz; tar -x -f coverage-*.tar.gz`` #. Install setuptools in the venv: ``../venv/bin/python3 setup.py install`` #. Install coverage in the venv -#. Set PYTHONPATH to ``fullcoverage`` (will need to change the directory): ``export PYTHONPATH=../coverage-N.N/coverage/fullcoverage`` +#. Set PYTHONPATH to ``fullcoverage`` (need to change your directory): ``export PYTHONPATH=../coverage-N.N/coverage/fullcoverage`` #. Run coverage from the venv: ``./bin/python -m coverage run --pylib -m test`` #. Unset PYTHONPATH: ``unset PYTHONPATH`` #. Generate coverage report: ``./bin/python -m coverage html --directory=../coverage_report -i --include="../cpython/Lib/*" --title="CPython test coverage report"`` @@ -135,13 +135,8 @@ #. Clean up the cpython clone: either ``make distclean`` or check it out again Do be aware that this step takes a few **hours**. If you find report generation -is the bottleneck (typically because of memory pressure), you can generate the -HTML reports in chunks at the cost of not having a comprehensive index. E.g. 
to -report for every module/package starting with the letter 'a':: - - ./bin/python3 -m coverage html --directory ../coverage_report -i ../cpython/Lib/a*.py ../cpython/Lib/a*/*.py ../cpython/Lib/a*/*/*.py - -You can then create an index using the textual report from coverage.py. +is the bottleneck you can try using PyPy3 or your installed Python 3 interpreter +to generate the report. .. _setuptools: https://pypi.python.org/pypi/setuptools .. _coverage: https://pypi.python.org/pypi/coverage -- Repository URL: http://hg.python.org/devinabox From python-checkins at python.org Tue Aug 13 20:20:34 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 13 Aug 2013 20:20:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318722=3A_Remove_u?= =?utf-8?q?ses_of_the_=22register=22_keyword_in_C_code=2E?= Message-ID: <3cF2KZ0tJKz7Lkk@mail.python.org> http://hg.python.org/cpython/rev/e7f6cef7a4cc changeset: 85157:e7f6cef7a4cc user: Antoine Pitrou date: Tue Aug 13 20:18:52 2013 +0200 summary: Issue #18722: Remove uses of the "register" keyword in C code. files: Include/bytesobject.h | 10 +- Include/unicodeobject.h | 8 +- Misc/NEWS | 2 + Modules/_codecsmodule.c | 6 +- Modules/_dbmmodule.c | 14 +- Modules/_gdbmmodule.c | 22 ++-- Modules/arraymodule.c | 6 +- Objects/bytearrayobject.c | 12 +- Objects/bytes_methods.c | 30 +++--- Objects/bytesobject.c | 44 +++++----- Objects/classobject.c | 6 +- Objects/complexobject.c | 2 +- Objects/dictobject.c | 78 +++++++++--------- Objects/floatobject.c | 4 +- Objects/listobject.c | 14 +- Objects/longobject.c | 24 ++-- Objects/namespaceobject.c | 2 +- Objects/object.c | 4 +- Objects/setobject.c | 74 +++++++++--------- Objects/stringlib/codecs.h | 6 +- Objects/stringlib/eq.h | 4 +- Objects/stringlib/find_max_char.h | 4 +- Objects/stringlib/split.h | 4 +- Objects/tupleobject.c | 48 +++++----- Objects/unicodectype.c | 2 +- Objects/unicodeobject.c | 26 +++--- Objects/unicodetype_db.h | 4 +- Parser/grammar1.c | 6 +- Parser/node.c | 2 +- Parser/parser.c | 34 ++++---- Parser/tokenizer.c | 12 +- Python/ceval.c | 24 ++-- Python/codecs.c | 4 +- Python/dynload_aix.c | 12 +- Python/marshal.c | 4 +- Python/mystrtoul.c | 10 +- Python/strdup.c | 2 +- Tools/unicode/makeunicodedata.py | 4 +- 38 files changed, 288 insertions(+), 286 deletions(-) diff --git a/Include/bytesobject.h b/Include/bytesobject.h --- a/Include/bytesobject.h +++ b/Include/bytesobject.h @@ -86,11 +86,11 @@ 0-terminated (passing a string with embedded NULL characters will cause an exception). 
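For context, "register" is purely an optimization hint that compilers are free to ignore; its main language-level effect in C is that you may not take the address of a variable declared with it, so deleting the keyword does not change program behavior. A minimal, hypothetical before/after sketch (the helper below is illustrative only and is not taken from the changeset):

#include <stdio.h>
#include <string.h>

/* Hypothetical helper, not from the patch: counts space characters in a
 * buffer.  The comment inside shows the old "register" style that this kind
 * of cleanup removes; the declarations below it are the post-cleanup form. */
static size_t
count_spaces(const char *s, size_t len)
{
    /* old style:  register const char *p = s;  register size_t n = 0; */
    const char *p = s;
    size_t n = 0;
    while (len--) {
        if (*p++ == ' ')
            n++;
    }
    return n;
}

int
main(void)
{
    const char msg[] = "remove register keyword";
    printf("%zu\n", count_spaces(msg, strlen(msg)));
    return 0;
}

Modern compilers make their own register-allocation decisions regardless of the hint, which is why a tree-wide removal like the one in this changeset is mechanical and behavior-preserving.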
*/ PyAPI_FUNC(int) PyBytes_AsStringAndSize( - register PyObject *obj, /* string or Unicode object */ - register char **s, /* pointer to buffer variable */ - register Py_ssize_t *len /* pointer to length variable or NULL - (only possible for 0-terminated - strings) */ + PyObject *obj, /* string or Unicode object */ + char **s, /* pointer to buffer variable */ + Py_ssize_t *len /* pointer to length variable or NULL + (only possible for 0-terminated + strings) */ ); /* Using the current locale, insert the thousands grouping diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h --- a/Include/unicodeobject.h +++ b/Include/unicodeobject.h @@ -859,7 +859,7 @@ */ PyAPI_FUNC(PyObject*) PyUnicode_FromEncodedObject( - register PyObject *obj, /* Object */ + PyObject *obj, /* Object */ const char *encoding, /* encoding */ const char *errors /* error handling */ ); @@ -878,7 +878,7 @@ */ PyAPI_FUNC(PyObject*) PyUnicode_FromObject( - register PyObject *obj /* Object */ + PyObject *obj /* Object */ ); PyAPI_FUNC(PyObject *) PyUnicode_FromFormatV( @@ -1015,7 +1015,7 @@ The buffer is copied into the new object. */ PyAPI_FUNC(PyObject*) PyUnicode_FromWideChar( - register const wchar_t *w, /* wchar_t buffer */ + const wchar_t *w, /* wchar_t buffer */ Py_ssize_t size /* size of buffer */ ); @@ -1033,7 +1033,7 @@ PyAPI_FUNC(Py_ssize_t) PyUnicode_AsWideChar( PyObject *unicode, /* Unicode object */ - register wchar_t *w, /* wchar_t buffer */ + wchar_t *w, /* wchar_t buffer */ Py_ssize_t size /* size of buffer */ ); diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #18722: Remove uses of the "register" keyword in C code. + - Issue #18667: Add missing "HAVE_FCHOWNAT" symbol to posix._have_functions. - Issue #16499: Add command line option for isolated mode. 
diff --git a/Modules/_codecsmodule.c b/Modules/_codecsmodule.c --- a/Modules/_codecsmodule.c +++ b/Modules/_codecsmodule.c @@ -189,9 +189,9 @@ return NULL; } else { - register Py_ssize_t i; - register char c; - register char *p = PyBytes_AS_STRING(v); + Py_ssize_t i; + char c; + char *p = PyBytes_AS_STRING(v); for (i = 0; i < size; i++) { /* There's at least enough room for a hex escape */ diff --git a/Modules/_dbmmodule.c b/Modules/_dbmmodule.c --- a/Modules/_dbmmodule.c +++ b/Modules/_dbmmodule.c @@ -63,7 +63,7 @@ /* Methods */ static void -dbm_dealloc(register dbmobject *dp) +dbm_dealloc(dbmobject *dp) { if ( dp->di_dbm ) dbm_close(dp->di_dbm); @@ -91,7 +91,7 @@ } static PyObject * -dbm_subscript(dbmobject *dp, register PyObject *key) +dbm_subscript(dbmobject *dp, PyObject *key) { datum drec, krec; Py_ssize_t tmp_size; @@ -166,7 +166,7 @@ }; static PyObject * -dbm__close(register dbmobject *dp, PyObject *unused) +dbm__close(dbmobject *dp, PyObject *unused) { if (dp->di_dbm) dbm_close(dp->di_dbm); @@ -176,9 +176,9 @@ } static PyObject * -dbm_keys(register dbmobject *dp, PyObject *unused) +dbm_keys(dbmobject *dp, PyObject *unused) { - register PyObject *v, *item; + PyObject *v, *item; datum key; int err; @@ -249,7 +249,7 @@ }; static PyObject * -dbm_get(register dbmobject *dp, PyObject *args) +dbm_get(dbmobject *dp, PyObject *args) { datum key, val; PyObject *defvalue = Py_None; @@ -272,7 +272,7 @@ } static PyObject * -dbm_setdefault(register dbmobject *dp, PyObject *args) +dbm_setdefault(dbmobject *dp, PyObject *args) { datum key, val; PyObject *defvalue = NULL; diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -79,7 +79,7 @@ /* Methods */ static void -dbm_dealloc(register dbmobject *dp) +dbm_dealloc(dbmobject *dp) { if (dp->di_dbm) gdbm_close(dp->di_dbm); @@ -112,7 +112,7 @@ } static PyObject * -dbm_subscript(dbmobject *dp, register PyObject *key) +dbm_subscript(dbmobject *dp, PyObject *key) { PyObject *v; datum drec, krec; @@ -232,7 +232,7 @@ Closes the database."); static PyObject * -dbm_close(register dbmobject *dp, PyObject *unused) +dbm_close(dbmobject *dp, PyObject *unused) { if (dp->di_dbm) gdbm_close(dp->di_dbm); @@ -247,9 +247,9 @@ Get a list of all keys in the database."); static PyObject * -dbm_keys(register dbmobject *dp, PyObject *unused) +dbm_keys(dbmobject *dp, PyObject *unused) { - register PyObject *v, *item; + PyObject *v, *item; datum key, nextkey; int err; @@ -328,9 +328,9 @@ returns the starting key."); static PyObject * -dbm_firstkey(register dbmobject *dp, PyObject *unused) +dbm_firstkey(dbmobject *dp, PyObject *unused) { - register PyObject *v; + PyObject *v; datum key; check_dbmobject_open(dp); @@ -358,9 +358,9 @@ k = db.nextkey(k)"); static PyObject * -dbm_nextkey(register dbmobject *dp, PyObject *args) +dbm_nextkey(dbmobject *dp, PyObject *args) { - register PyObject *v; + PyObject *v; datum key, nextkey; if (!PyArg_ParseTuple(args, "s#:nextkey", &key.dptr, &key.dsize)) @@ -387,7 +387,7 @@ kept and reused as new (key,value) pairs are added."); static PyObject * -dbm_reorganize(register dbmobject *dp, PyObject *unused) +dbm_reorganize(dbmobject *dp, PyObject *unused) { check_dbmobject_open(dp); errno = 0; @@ -408,7 +408,7 @@ any unwritten data to be written to the disk."); static PyObject * -dbm_sync(register dbmobject *dp, PyObject *unused) +dbm_sync(dbmobject *dp, PyObject *unused) { check_dbmobject_open(dp); gdbm_sync(dp->di_dbm); diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c 
--- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -513,7 +513,7 @@ static PyObject * getarrayitem(PyObject *op, Py_ssize_t i) { - register arrayobject *ap; + arrayobject *ap; assert(array_Check(op)); ap = (arrayobject *)op; assert(i>=0 && iob_descr->itemsize; - register char *p, *q; + Py_ssize_t itemsize = self->ob_descr->itemsize; + char *p, *q; /* little buffer to hold items while swapping */ char tmp[256]; /* 8 is probably enough -- but why skimp */ assert((size_t)itemsize <= sizeof(tmp)); diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -862,9 +862,9 @@ /* 15 == strlen(quote_prefix) + 2 + strlen(quote_postfix) + 1 */ size_t newsize; PyObject *v; - register Py_ssize_t i; - register char c; - register char *p; + Py_ssize_t i; + char c; + char *p; int quote; char *test, *start; char *buffer; @@ -1431,9 +1431,9 @@ static PyObject * bytearray_translate(PyByteArrayObject *self, PyObject *args) { - register char *input, *output; - register const char *table; - register Py_ssize_t i, c; + char *input, *output; + const char *table; + Py_ssize_t i, c; PyObject *input_obj = (PyObject*)self; const char *output_start; Py_ssize_t inlen; diff --git a/Objects/bytes_methods.c b/Objects/bytes_methods.c --- a/Objects/bytes_methods.c +++ b/Objects/bytes_methods.c @@ -10,9 +10,9 @@ PyObject* _Py_bytes_isspace(const char *cptr, Py_ssize_t len) { - register const unsigned char *p + const unsigned char *p = (unsigned char *) cptr; - register const unsigned char *e; + const unsigned char *e; /* Shortcut for single character strings */ if (len == 1 && Py_ISSPACE(*p)) @@ -40,9 +40,9 @@ PyObject* _Py_bytes_isalpha(const char *cptr, Py_ssize_t len) { - register const unsigned char *p + const unsigned char *p = (unsigned char *) cptr; - register const unsigned char *e; + const unsigned char *e; /* Shortcut for single character strings */ if (len == 1 && Py_ISALPHA(*p)) @@ -70,9 +70,9 @@ PyObject* _Py_bytes_isalnum(const char *cptr, Py_ssize_t len) { - register const unsigned char *p + const unsigned char *p = (unsigned char *) cptr; - register const unsigned char *e; + const unsigned char *e; /* Shortcut for single character strings */ if (len == 1 && Py_ISALNUM(*p)) @@ -100,9 +100,9 @@ PyObject* _Py_bytes_isdigit(const char *cptr, Py_ssize_t len) { - register const unsigned char *p + const unsigned char *p = (unsigned char *) cptr; - register const unsigned char *e; + const unsigned char *e; /* Shortcut for single character strings */ if (len == 1 && Py_ISDIGIT(*p)) @@ -130,9 +130,9 @@ PyObject* _Py_bytes_islower(const char *cptr, Py_ssize_t len) { - register const unsigned char *p + const unsigned char *p = (unsigned char *) cptr; - register const unsigned char *e; + const unsigned char *e; int cased; /* Shortcut for single character strings */ @@ -164,9 +164,9 @@ PyObject* _Py_bytes_isupper(const char *cptr, Py_ssize_t len) { - register const unsigned char *p + const unsigned char *p = (unsigned char *) cptr; - register const unsigned char *e; + const unsigned char *e; int cased; /* Shortcut for single character strings */ @@ -200,9 +200,9 @@ PyObject* _Py_bytes_istitle(const char *cptr, Py_ssize_t len) { - register const unsigned char *p + const unsigned char *p = (unsigned char *) cptr; - register const unsigned char *e; + const unsigned char *e; int cased, previous_is_cased; /* Shortcut for single character strings */ @@ -217,7 +217,7 @@ cased = 0; previous_is_cased = 0; for (; p < e; p++) { - register const unsigned 
char ch = *p; + const unsigned char ch = *p; if (Py_ISUPPER(ch)) { if (previous_is_cased) diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -74,7 +74,7 @@ PyObject * PyBytes_FromStringAndSize(const char *str, Py_ssize_t size) { - register PyBytesObject *op; + PyBytesObject *op; if (size < 0) { PyErr_SetString(PyExc_SystemError, "Negative size passed to PyBytes_FromStringAndSize"); @@ -126,8 +126,8 @@ PyObject * PyBytes_FromString(const char *str) { - register size_t size; - register PyBytesObject *op; + size_t size; + PyBytesObject *op; assert(str != NULL); size = strlen(str); @@ -513,7 +513,7 @@ /* object api */ Py_ssize_t -PyBytes_Size(register PyObject *op) +PyBytes_Size(PyObject *op) { if (!PyBytes_Check(op)) { PyErr_Format(PyExc_TypeError, @@ -524,7 +524,7 @@ } char * -PyBytes_AsString(register PyObject *op) +PyBytes_AsString(PyObject *op) { if (!PyBytes_Check(op)) { PyErr_Format(PyExc_TypeError, @@ -535,9 +535,9 @@ } int -PyBytes_AsStringAndSize(register PyObject *obj, - register char **s, - register Py_ssize_t *len) +PyBytes_AsStringAndSize(PyObject *obj, + char **s, + Py_ssize_t *len) { if (s == NULL) { PyErr_BadInternalCall(); @@ -579,7 +579,7 @@ PyObject * PyBytes_Repr(PyObject *obj, int smartquotes) { - register PyBytesObject* op = (PyBytesObject*) obj; + PyBytesObject* op = (PyBytesObject*) obj; Py_ssize_t i, length = Py_SIZE(op); size_t newsize, squotes, dquotes; PyObject *v; @@ -718,12 +718,12 @@ } static PyObject * -bytes_repeat(register PyBytesObject *a, register Py_ssize_t n) +bytes_repeat(PyBytesObject *a, Py_ssize_t n) { - register Py_ssize_t i; - register Py_ssize_t j; - register Py_ssize_t size; - register PyBytesObject *op; + Py_ssize_t i; + Py_ssize_t j; + Py_ssize_t size; + PyBytesObject *op; size_t nbytes; if (n < 0) n = 0; @@ -793,7 +793,7 @@ } static PyObject * -bytes_item(PyBytesObject *a, register Py_ssize_t i) +bytes_item(PyBytesObject *a, Py_ssize_t i) { if (i < 0 || i >= Py_SIZE(a)) { PyErr_SetString(PyExc_IndexError, "index out of range"); @@ -1461,9 +1461,9 @@ static PyObject * bytes_translate(PyBytesObject *self, PyObject *args) { - register char *input, *output; + char *input, *output; const char *table; - register Py_ssize_t i, c, changed = 0; + Py_ssize_t i, c, changed = 0; PyObject *input_obj = (PyObject*)self; const char *output_start, *del_table=NULL; Py_ssize_t inlen, tablen, dellen = 0; @@ -2748,9 +2748,9 @@ }; void -PyBytes_Concat(register PyObject **pv, register PyObject *w) +PyBytes_Concat(PyObject **pv, PyObject *w) { - register PyObject *v; + PyObject *v; assert(pv != NULL); if (*pv == NULL) return; @@ -2764,7 +2764,7 @@ } void -PyBytes_ConcatAndDel(register PyObject **pv, register PyObject *w) +PyBytes_ConcatAndDel(PyObject **pv, PyObject *w) { PyBytes_Concat(pv, w); Py_XDECREF(w); @@ -2788,8 +2788,8 @@ int _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) { - register PyObject *v; - register PyBytesObject *sv; + PyObject *v; + PyBytesObject *sv; v = *pv; if (!PyBytes_Check(v) || Py_REFCNT(v) != 1 || newsize < 0) { *pv = 0; diff --git a/Objects/classobject.c b/Objects/classobject.c --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -44,7 +44,7 @@ PyObject * PyMethod_New(PyObject *func, PyObject *self) { - register PyMethodObject *im; + PyMethodObject *im; if (self == NULL) { PyErr_BadInternalCall(); return NULL; @@ -164,7 +164,7 @@ } static void -method_dealloc(register PyMethodObject *im) +method_dealloc(PyMethodObject *im) { _PyObject_GC_UNTRACK(im); if 
(im->im_weakreflist != NULL) @@ -509,7 +509,7 @@ static PyObject * instancemethod_descr_get(PyObject *descr, PyObject *obj, PyObject *type) { - register PyObject *func = PyInstanceMethod_GET_FUNCTION(descr); + PyObject *func = PyInstanceMethod_GET_FUNCTION(descr); if (obj == NULL) { Py_INCREF(func); return func; diff --git a/Objects/complexobject.c b/Objects/complexobject.c --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -211,7 +211,7 @@ PyObject * PyComplex_FromCComplex(Py_complex cval) { - register PyComplexObject *op; + PyComplexObject *op; /* Inline PyObject_New */ op = (PyComplexObject *) PyObject_MALLOC(sizeof(PyComplexObject)); diff --git a/Objects/dictobject.c b/Objects/dictobject.c --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -467,13 +467,13 @@ lookdict(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject ***value_addr) { - register size_t i; - register size_t perturb; - register PyDictKeyEntry *freeslot; - register size_t mask; + size_t i; + size_t perturb; + PyDictKeyEntry *freeslot; + size_t mask; PyDictKeyEntry *ep0; - register PyDictKeyEntry *ep; - register int cmp; + PyDictKeyEntry *ep; + int cmp; PyObject *startkey; top: @@ -559,12 +559,12 @@ lookdict_unicode(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject ***value_addr) { - register size_t i; - register size_t perturb; - register PyDictKeyEntry *freeslot; - register size_t mask = DK_MASK(mp->ma_keys); + size_t i; + size_t perturb; + PyDictKeyEntry *freeslot; + size_t mask = DK_MASK(mp->ma_keys); PyDictKeyEntry *ep0 = &mp->ma_keys->dk_entries[0]; - register PyDictKeyEntry *ep; + PyDictKeyEntry *ep; /* Make sure this function doesn't have to handle non-unicode keys, including subclasses of str; e.g., one reason to subclass @@ -624,11 +624,11 @@ lookdict_unicode_nodummy(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject ***value_addr) { - register size_t i; - register size_t perturb; - register size_t mask = DK_MASK(mp->ma_keys); + size_t i; + size_t perturb; + size_t mask = DK_MASK(mp->ma_keys); PyDictKeyEntry *ep0 = &mp->ma_keys->dk_entries[0]; - register PyDictKeyEntry *ep; + PyDictKeyEntry *ep; /* Make sure this function doesn't have to handle non-unicode keys, including subclasses of str; e.g., one reason to subclass @@ -669,11 +669,11 @@ lookdict_split(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject ***value_addr) { - register size_t i; - register size_t perturb; - register size_t mask = DK_MASK(mp->ma_keys); + size_t i; + size_t perturb; + size_t mask = DK_MASK(mp->ma_keys); PyDictKeyEntry *ep0 = &mp->ma_keys->dk_entries[0]; - register PyDictKeyEntry *ep; + PyDictKeyEntry *ep; if (!PyUnicode_CheckExact(key)) { ep = lookdict(mp, key, hash, value_addr); @@ -1498,7 +1498,7 @@ } static PyObject * -dict_subscript(PyDictObject *mp, register PyObject *key) +dict_subscript(PyDictObject *mp, PyObject *key) { PyObject *v; Py_hash_t hash; @@ -1554,10 +1554,10 @@ }; static PyObject * -dict_keys(register PyDictObject *mp) +dict_keys(PyDictObject *mp) { - register PyObject *v; - register Py_ssize_t i, j; + PyObject *v; + Py_ssize_t i, j; PyDictKeyEntry *ep; Py_ssize_t size, n, offset; PyObject **value_ptr; @@ -1598,10 +1598,10 @@ } static PyObject * -dict_values(register PyDictObject *mp) +dict_values(PyDictObject *mp) { - register PyObject *v; - register Py_ssize_t i, j; + PyObject *v; + Py_ssize_t i, j; Py_ssize_t size, n, offset; PyObject **value_ptr; @@ -1640,10 +1640,10 @@ } static PyObject * -dict_items(register PyDictObject *mp) +dict_items(PyDictObject *mp) { - 
register PyObject *v; - register Py_ssize_t i, j, n; + PyObject *v; + Py_ssize_t i, j, n; Py_ssize_t size, offset; PyObject *item, *key; PyDictKeyEntry *ep; @@ -1915,8 +1915,8 @@ int PyDict_Merge(PyObject *a, PyObject *b, int override) { - register PyDictObject *mp, *other; - register Py_ssize_t i, n; + PyDictObject *mp, *other; + Py_ssize_t i, n; PyDictKeyEntry *entry; /* We accept for the argument either a concrete dictionary object, @@ -2013,7 +2013,7 @@ } static PyObject * -dict_copy(register PyDictObject *mp) +dict_copy(PyDictObject *mp) { return PyDict_Copy((PyObject*)mp); } @@ -2175,7 +2175,7 @@ } static PyObject * -dict_contains(register PyDictObject *mp, PyObject *key) +dict_contains(PyDictObject *mp, PyObject *key) { Py_hash_t hash; PyDictKeyEntry *ep; @@ -2194,7 +2194,7 @@ } static PyObject * -dict_get(register PyDictObject *mp, PyObject *args) +dict_get(PyDictObject *mp, PyObject *args) { PyObject *key; PyObject *failobj = Py_None; @@ -2280,7 +2280,7 @@ } static PyObject * -dict_clear(register PyDictObject *mp) +dict_clear(PyDictObject *mp) { PyDict_Clear((PyObject *)mp); Py_RETURN_NONE; @@ -2824,8 +2824,8 @@ static PyObject *dictiter_iternextkey(dictiterobject *di) { PyObject *key; - register Py_ssize_t i, mask, offset; - register PyDictKeysObject *k; + Py_ssize_t i, mask, offset; + PyDictKeysObject *k; PyDictObject *d = di->di_dict; PyObject **value_ptr; @@ -2907,7 +2907,7 @@ static PyObject *dictiter_iternextvalue(dictiterobject *di) { PyObject *value; - register Py_ssize_t i, mask, offset; + Py_ssize_t i, mask, offset; PyDictObject *d = di->di_dict; PyObject **value_ptr; @@ -2988,7 +2988,7 @@ static PyObject *dictiter_iternextitem(dictiterobject *di) { PyObject *key, *value, *result = di->di_result; - register Py_ssize_t i, mask, offset; + Py_ssize_t i, mask, offset; PyDictObject *d = di->di_dict; PyObject **value_ptr; diff --git a/Objects/floatobject.c b/Objects/floatobject.c --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -109,7 +109,7 @@ PyObject * PyFloat_FromDouble(double fval) { - register PyFloatObject *op = free_list; + PyFloatObject *op = free_list; if (op != NULL) { free_list = (PyFloatObject *) Py_TYPE(op); numfree--; @@ -241,7 +241,7 @@ static int convert_to_double(PyObject **v, double *dbl) { - register PyObject *obj = *v; + PyObject *obj = *v; if (PyLong_Check(obj)) { *dbl = PyLong_AsDouble(obj); diff --git a/Objects/listobject.c b/Objects/listobject.c --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -213,11 +213,11 @@ } int -PyList_SetItem(register PyObject *op, register Py_ssize_t i, - register PyObject *newitem) +PyList_SetItem(PyObject *op, Py_ssize_t i, + PyObject *newitem) { - register PyObject *olditem; - register PyObject **p; + PyObject *olditem; + PyObject **p; if (!PyList_Check(op)) { Py_XDECREF(newitem); PyErr_BadInternalCall(); @@ -1058,9 +1058,9 @@ static int binarysort(sortslice lo, PyObject **hi, PyObject **start) { - register Py_ssize_t k; - register PyObject **l, **p, **r; - register PyObject *pivot; + Py_ssize_t k; + PyObject **l, **p, **r; + PyObject *pivot; assert(lo.keys <= start && start <= hi); /* assert [lo, start) is sorted */ diff --git a/Objects/longobject.c b/Objects/longobject.c --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -110,7 +110,7 @@ of the algorithms used, this could save at most be one word anyway. 
*/ static PyLongObject * -long_normalize(register PyLongObject *v) +long_normalize(PyLongObject *v) { Py_ssize_t j = ABS(Py_SIZE(v)); Py_ssize_t i = j; @@ -340,7 +340,7 @@ PyLong_AsLongAndOverflow(PyObject *vv, int *overflow) { /* This version by Tim Peters */ - register PyLongObject *v; + PyLongObject *v; unsigned long x, prev; long res; Py_ssize_t i; @@ -463,7 +463,7 @@ Py_ssize_t PyLong_AsSsize_t(PyObject *vv) { - register PyLongObject *v; + PyLongObject *v; size_t x, prev; Py_ssize_t i; int sign; @@ -519,7 +519,7 @@ unsigned long PyLong_AsUnsignedLong(PyObject *vv) { - register PyLongObject *v; + PyLongObject *v; unsigned long x, prev; Py_ssize_t i; @@ -563,7 +563,7 @@ size_t PyLong_AsSize_t(PyObject *vv) { - register PyLongObject *v; + PyLongObject *v; size_t x, prev; Py_ssize_t i; @@ -606,7 +606,7 @@ static unsigned long _PyLong_AsUnsignedLongMask(PyObject *vv) { - register PyLongObject *v; + PyLongObject *v; unsigned long x; Py_ssize_t i; int sign; @@ -634,7 +634,7 @@ } unsigned long -PyLong_AsUnsignedLongMask(register PyObject *op) +PyLong_AsUnsignedLongMask(PyObject *op) { PyNumberMethods *nb; PyLongObject *lo; @@ -1250,7 +1250,7 @@ static unsigned PY_LONG_LONG _PyLong_AsUnsignedLongLongMask(PyObject *vv) { - register PyLongObject *v; + PyLongObject *v; unsigned PY_LONG_LONG x; Py_ssize_t i; int sign; @@ -1278,7 +1278,7 @@ } unsigned PY_LONG_LONG -PyLong_AsUnsignedLongLongMask(register PyObject *op) +PyLong_AsUnsignedLongLongMask(PyObject *op) { PyNumberMethods *nb; PyLongObject *lo; @@ -1326,7 +1326,7 @@ PyLong_AsLongLongAndOverflow(PyObject *vv, int *overflow) { /* This version by Tim Peters */ - register PyLongObject *v; + PyLongObject *v; unsigned PY_LONG_LONG x, prev; PY_LONG_LONG res; Py_ssize_t i; @@ -1744,7 +1744,7 @@ long_format_binary(PyObject *aa, int base, int alternate, PyObject **p_output, _PyUnicodeWriter *writer) { - register PyLongObject *a = (PyLongObject *)aa; + PyLongObject *a = (PyLongObject *)aa; PyObject *v; Py_ssize_t sz; Py_ssize_t size_a; @@ -2141,7 +2141,7 @@ just 1 digit at the start, so that the copying code was exercised for every digit beyond the first. ***/ - register twodigits c; /* current input character */ + twodigits c; /* current input character */ Py_ssize_t size_z; int i; int convwidth; diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c --- a/Objects/namespaceobject.c +++ b/Objects/namespaceobject.c @@ -176,7 +176,7 @@ PyDoc_STRVAR(namespace_reduce__doc__, "Return state information for pickling"); static PyObject * -namespace_reduce(register _PyNamespaceObject *ns) +namespace_reduce(_PyNamespaceObject *ns) { PyObject *result, *args = PyTuple_New(0); diff --git a/Objects/object.c b/Objects/object.c --- a/Objects/object.c +++ b/Objects/object.c @@ -1834,10 +1834,10 @@ } void -_Py_ForgetReference(register PyObject *op) +_Py_ForgetReference(PyObject *op) { #ifdef SLOW_UNREF_CHECK - register PyObject *p; + PyObject *p; #endif if (op->ob_refcnt < 0) Py_FatalError("UNREF negative refcnt"); diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -75,15 +75,15 @@ */ static setentry * -set_lookkey(PySetObject *so, PyObject *key, register Py_hash_t hash) +set_lookkey(PySetObject *so, PyObject *key, Py_hash_t hash) { - register size_t i; /* Unsigned for defined overflow behavior. */ - register size_t perturb; - register setentry *freeslot; - register size_t mask = so->mask; + size_t i; /* Unsigned for defined overflow behavior. 
*/ + size_t perturb; + setentry *freeslot; + size_t mask = so->mask; setentry *table = so->table; - register setentry *entry; - register int cmp; + setentry *entry; + int cmp; PyObject *startkey; i = (size_t)hash & mask; @@ -157,14 +157,14 @@ * see if the comparison altered the table. */ static setentry * -set_lookkey_unicode(PySetObject *so, PyObject *key, register Py_hash_t hash) +set_lookkey_unicode(PySetObject *so, PyObject *key, Py_hash_t hash) { - register size_t i; /* Unsigned for defined overflow behavior. */ - register size_t perturb; - register setentry *freeslot; - register size_t mask = so->mask; + size_t i; /* Unsigned for defined overflow behavior. */ + size_t perturb; + setentry *freeslot; + size_t mask = so->mask; setentry *table = so->table; - register setentry *entry; + setentry *entry; /* Make sure this function doesn't have to handle non-unicode keys, including subclasses of str; e.g., one reason to subclass @@ -211,9 +211,9 @@ Eats a reference to key. */ static int -set_insert_key(register PySetObject *so, PyObject *key, Py_hash_t hash) +set_insert_key(PySetObject *so, PyObject *key, Py_hash_t hash) { - register setentry *entry; + setentry *entry; assert(so->lookup != NULL); entry = so->lookup(so, key, hash); @@ -247,13 +247,13 @@ is responsible for incref'ing `key`. */ static void -set_insert_clean(register PySetObject *so, PyObject *key, Py_hash_t hash) +set_insert_clean(PySetObject *so, PyObject *key, Py_hash_t hash) { - register size_t i; - register size_t perturb; - register size_t mask = (size_t)so->mask; + size_t i; + size_t perturb; + size_t mask = (size_t)so->mask; setentry *table = so->table; - register setentry *entry; + setentry *entry; i = (size_t)hash & mask; entry = &table[i]; @@ -360,9 +360,9 @@ /* CAUTION: set_add_key/entry() must guarantee it won't resize the table */ static int -set_add_entry(register PySetObject *so, setentry *entry) +set_add_entry(PySetObject *so, setentry *entry) { - register Py_ssize_t n_used; + Py_ssize_t n_used; PyObject *key = entry->key; Py_hash_t hash = entry->hash; @@ -379,10 +379,10 @@ } static int -set_add_key(register PySetObject *so, PyObject *key) +set_add_key(PySetObject *so, PyObject *key) { - register Py_hash_t hash; - register Py_ssize_t n_used; + Py_hash_t hash; + Py_ssize_t n_used; if (!PyUnicode_CheckExact(key) || (hash = ((PyASCIIObject *) key)->hash) == -1) { @@ -407,7 +407,7 @@ static int set_discard_entry(PySetObject *so, setentry *oldentry) -{ register setentry *entry; +{ setentry *entry; PyObject *old_key; entry = (so->lookup)(so, oldentry->key, oldentry->hash); @@ -426,8 +426,8 @@ static int set_discard_key(PySetObject *so, PyObject *key) { - register Py_hash_t hash; - register setentry *entry; + Py_hash_t hash; + setentry *entry; PyObject *old_key; assert (PyAnySet_Check(so)); @@ -533,7 +533,7 @@ { Py_ssize_t i; Py_ssize_t mask; - register setentry *table; + setentry *table; assert (PyAnySet_Check(so)); i = *pos_ptr; @@ -553,7 +553,7 @@ static void set_dealloc(PySetObject *so) { - register setentry *entry; + setentry *entry; Py_ssize_t fill = so->fill; PyObject_GC_UnTrack(so); Py_TRASHCAN_SAFE_BEGIN(so) @@ -632,8 +632,8 @@ PySetObject *other; PyObject *key; Py_hash_t hash; - register Py_ssize_t i; - register setentry *entry; + Py_ssize_t i; + setentry *entry; assert (PyAnySet_Check(so)); assert (PyAnySet_Check(otherset)); @@ -701,8 +701,8 @@ static PyObject * set_pop(PySetObject *so) { - register Py_ssize_t i = 0; - register setentry *entry; + Py_ssize_t i = 0; + setentry *entry; PyObject *key; assert 
(PyAnySet_Check(so)); @@ -869,8 +869,8 @@ static PyObject *setiter_iternext(setiterobject *si) { PyObject *key; - register Py_ssize_t i, mask; - register setentry *entry; + Py_ssize_t i, mask; + setentry *entry; PySetObject *so = si->si_set; if (so == NULL) @@ -1024,7 +1024,7 @@ static PyObject * make_new_set(PyTypeObject *type, PyObject *iterable) { - register PySetObject *so = NULL; + PySetObject *so = NULL; if (dummy == NULL) { /* Auto-initialize dummy */ dummy = PyUnicode_FromString(""); diff --git a/Objects/stringlib/codecs.h b/Objects/stringlib/codecs.h --- a/Objects/stringlib/codecs.h +++ b/Objects/stringlib/codecs.h @@ -38,8 +38,8 @@ */ if (_Py_IS_ALIGNED(s, SIZEOF_LONG)) { /* Help register allocation */ - register const char *_s = s; - register STRINGLIB_CHAR *_p = p; + const char *_s = s; + STRINGLIB_CHAR *_p = p; while (_s < aligned_end) { /* Read a whole long at a time (either 4 or 8 bytes), and do a fast unrolled copy if it only contains ASCII @@ -499,7 +499,7 @@ reads are more expensive, better to defer to another iteration. */ if (_Py_IS_ALIGNED(q, SIZEOF_LONG)) { /* Fast path for runs of in-range non-surrogate chars. */ - register const unsigned char *_q = q; + const unsigned char *_q = q; while (_q < aligned_end) { unsigned long block = * (unsigned long *) _q; if (native_ordering) { diff --git a/Objects/stringlib/eq.h b/Objects/stringlib/eq.h --- a/Objects/stringlib/eq.h +++ b/Objects/stringlib/eq.h @@ -6,8 +6,8 @@ Py_LOCAL_INLINE(int) unicode_eq(PyObject *aa, PyObject *bb) { - register PyUnicodeObject *a = (PyUnicodeObject *)aa; - register PyUnicodeObject *b = (PyUnicodeObject *)bb; + PyUnicodeObject *a = (PyUnicodeObject *)aa; + PyUnicodeObject *b = (PyUnicodeObject *)bb; if (PyUnicode_READY(a) == -1 || PyUnicode_READY(b) == -1) { assert(0 && "unicode_eq ready fail"); diff --git a/Objects/stringlib/find_max_char.h b/Objects/stringlib/find_max_char.h --- a/Objects/stringlib/find_max_char.h +++ b/Objects/stringlib/find_max_char.h @@ -24,7 +24,7 @@ while (p < end) { if (_Py_IS_ALIGNED(p, SIZEOF_LONG)) { /* Help register allocation */ - register const unsigned char *_p = p; + const unsigned char *_p = p; while (_p < aligned_end) { unsigned long value = *(unsigned long *) _p; if (value & UCS1_ASCII_CHAR_MASK) @@ -66,7 +66,7 @@ #else #error Invalid STRINGLIB_SIZEOF_CHAR (must be 1, 2 or 4) #endif - register Py_UCS4 mask; + Py_UCS4 mask; Py_ssize_t n = end - begin; const STRINGLIB_CHAR *p = begin; const STRINGLIB_CHAR *unrolled_end = begin + _Py_SIZE_ROUND_DOWN(n, 4); diff --git a/Objects/stringlib/split.h b/Objects/stringlib/split.h --- a/Objects/stringlib/split.h +++ b/Objects/stringlib/split.h @@ -345,8 +345,8 @@ and the appends only done when the prealloc buffer is full. 
That's too much work for little gain.*/ - register Py_ssize_t i; - register Py_ssize_t j; + Py_ssize_t i; + Py_ssize_t j; PyObject *list = PyList_New(0); PyObject *sub; diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -63,9 +63,9 @@ } PyObject * -PyTuple_New(register Py_ssize_t size) +PyTuple_New(Py_ssize_t size) { - register PyTupleObject *op; + PyTupleObject *op; Py_ssize_t i; if (size < 0) { PyErr_BadInternalCall(); @@ -122,7 +122,7 @@ } Py_ssize_t -PyTuple_Size(register PyObject *op) +PyTuple_Size(PyObject *op) { if (!PyTuple_Check(op)) { PyErr_BadInternalCall(); @@ -133,7 +133,7 @@ } PyObject * -PyTuple_GetItem(register PyObject *op, register Py_ssize_t i) +PyTuple_GetItem(PyObject *op, Py_ssize_t i) { if (!PyTuple_Check(op)) { PyErr_BadInternalCall(); @@ -147,10 +147,10 @@ } int -PyTuple_SetItem(register PyObject *op, register Py_ssize_t i, PyObject *newitem) +PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem) { - register PyObject *olditem; - register PyObject **p; + PyObject *olditem; + PyObject **p; if (!PyTuple_Check(op) || op->ob_refcnt != 1) { Py_XDECREF(newitem); PyErr_BadInternalCall(); @@ -224,10 +224,10 @@ /* Methods */ static void -tupledealloc(register PyTupleObject *op) +tupledealloc(PyTupleObject *op) { - register Py_ssize_t i; - register Py_ssize_t len = Py_SIZE(op); + Py_ssize_t i; + Py_ssize_t len = Py_SIZE(op); PyObject_GC_UnTrack(op); Py_TRASHCAN_SAFE_BEGIN(op) if (len > 0) { @@ -330,10 +330,10 @@ static Py_hash_t tuplehash(PyTupleObject *v) { - register Py_uhash_t x; /* Unsigned for defined overflow behavior. */ - register Py_hash_t y; - register Py_ssize_t len = Py_SIZE(v); - register PyObject **p; + Py_uhash_t x; /* Unsigned for defined overflow behavior. */ + Py_hash_t y; + Py_ssize_t len = Py_SIZE(v); + PyObject **p; Py_uhash_t mult = _PyHASH_MULTIPLIER; x = 0x345678UL; p = v->ob_item; @@ -370,7 +370,7 @@ } static PyObject * -tupleitem(register PyTupleObject *a, register Py_ssize_t i) +tupleitem(PyTupleObject *a, Py_ssize_t i) { if (i < 0 || i >= Py_SIZE(a)) { PyErr_SetString(PyExc_IndexError, "tuple index out of range"); @@ -381,12 +381,12 @@ } static PyObject * -tupleslice(register PyTupleObject *a, register Py_ssize_t ilow, - register Py_ssize_t ihigh) +tupleslice(PyTupleObject *a, Py_ssize_t ilow, + Py_ssize_t ihigh) { - register PyTupleObject *np; + PyTupleObject *np; PyObject **src, **dest; - register Py_ssize_t i; + Py_ssize_t i; Py_ssize_t len; if (ilow < 0) ilow = 0; @@ -423,10 +423,10 @@ } static PyObject * -tupleconcat(register PyTupleObject *a, register PyObject *bb) +tupleconcat(PyTupleObject *a, PyObject *bb) { - register Py_ssize_t size; - register Py_ssize_t i; + Py_ssize_t size; + Py_ssize_t i; PyObject **src, **dest; PyTupleObject *np; if (!PyTuple_Check(bb)) { @@ -836,8 +836,8 @@ int _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) { - register PyTupleObject *v; - register PyTupleObject *sv; + PyTupleObject *v; + PyTupleObject *sv; Py_ssize_t i; Py_ssize_t oldsize; diff --git a/Objects/unicodectype.c b/Objects/unicodectype.c --- a/Objects/unicodectype.c +++ b/Objects/unicodectype.c @@ -61,7 +61,7 @@ /* Returns the titlecase Unicode characters corresponding to ch or just ch if no titlecase mapping is known. 
*/ -Py_UCS4 _PyUnicode_ToTitlecase(register Py_UCS4 ch) +Py_UCS4 _PyUnicode_ToTitlecase(Py_UCS4 ch) { const _PyUnicode_TypeRecord *ctype = gettyperecord(ch); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -873,7 +873,7 @@ static PyUnicodeObject * _PyUnicode_New(Py_ssize_t length) { - register PyUnicodeObject *unicode; + PyUnicodeObject *unicode; size_t new_size; /* Optimization for empty strings */ @@ -1557,7 +1557,7 @@ } static void -unicode_dealloc(register PyObject *unicode) +unicode_dealloc(PyObject *unicode) { switch (PyUnicode_CHECK_INTERNED(unicode)) { case SSTATE_NOT_INTERNED: @@ -2287,7 +2287,7 @@ #ifdef HAVE_WCHAR_H PyObject * -PyUnicode_FromWideChar(register const wchar_t *w, Py_ssize_t size) +PyUnicode_FromWideChar(const wchar_t *w, Py_ssize_t size) { if (w == NULL) { if (size == 0) @@ -2898,7 +2898,7 @@ } PyObject * -PyUnicode_FromObject(register PyObject *obj) +PyUnicode_FromObject(PyObject *obj) { /* XXX Perhaps we should make this API an alias of PyObject_Str() instead ?! */ @@ -2920,7 +2920,7 @@ } PyObject * -PyUnicode_FromEncodedObject(register PyObject *obj, +PyUnicode_FromEncodedObject(PyObject *obj, const char *encoding, const char *errors) { @@ -4653,9 +4653,9 @@ if (_Py_IS_ALIGNED(p, SIZEOF_LONG)) { /* Fast path, see in STRINGLIB(utf8_decode) for an explanation. */ - /* Help register allocation */ - register const char *_p = p; - register Py_UCS1 * q = dest; + /* Help allocation */ + const char *_p = p; + Py_UCS1 * q = dest; while (_p < aligned_end) { unsigned long value = *(const unsigned long *) _p; if (value & ASCII_CHAR_MASK) @@ -4678,8 +4678,8 @@ /* Fast path, see in STRINGLIB(utf8_decode) in stringlib/codecs.h for an explanation. */ if (_Py_IS_ALIGNED(p, SIZEOF_LONG)) { - /* Help register allocation */ - register const char *_p = p; + /* Help allocation */ + const char *_p = p; while (_p < aligned_end) { unsigned long value = *(unsigned long *) _p; if (value & ASCII_CHAR_MASK) @@ -6513,7 +6513,7 @@ s += writer.pos; kind = writer.kind; while (s < e) { - register unsigned char c = (unsigned char)*s; + unsigned char c = (unsigned char)*s; if (c < 128) { PyUnicode_WRITE(kind, data, writer.pos, c); writer.pos++; @@ -14621,7 +14621,7 @@ void PyUnicode_InternInPlace(PyObject **p) { - register PyObject *s = *p; + PyObject *s = *p; PyObject *t; #ifdef Py_DEBUG assert(s != NULL); @@ -14954,7 +14954,7 @@ int Py_UNICODE_strncmp(const Py_UNICODE *s1, const Py_UNICODE *s2, size_t n) { - register Py_UNICODE u1, u2; + Py_UNICODE u1, u2; for (; n != 0; n--) { u1 = *s1; u2 = *s2; diff --git a/Objects/unicodetype_db.h b/Objects/unicodetype_db.h --- a/Objects/unicodetype_db.h +++ b/Objects/unicodetype_db.h @@ -4278,7 +4278,7 @@ /* Returns 1 for Unicode characters having the bidirectional * type 'WS', 'B' or 'S' or the category 'Zs', 0 otherwise. */ -int _PyUnicode_IsWhitespace(register const Py_UCS4 ch) +int _PyUnicode_IsWhitespace(const Py_UCS4 ch) { switch (ch) { case 0x0009: @@ -4320,7 +4320,7 @@ * property 'BK', 'CR', 'LF' or 'NL' or having bidirectional * type 'B', 0 otherwise. 
*/ -int _PyUnicode_IsLinebreak(register const Py_UCS4 ch) +int _PyUnicode_IsLinebreak(const Py_UCS4 ch) { switch (ch) { case 0x000A: diff --git a/Parser/grammar1.c b/Parser/grammar1.c --- a/Parser/grammar1.c +++ b/Parser/grammar1.c @@ -9,9 +9,9 @@ /* Return the DFA for the given type */ dfa * -PyGrammar_FindDFA(grammar *g, register int type) +PyGrammar_FindDFA(grammar *g, int type) { - register dfa *d; + dfa *d; #if 1 /* Massive speed-up */ d = &g->g_dfa[type - NT_OFFSET]; @@ -19,7 +19,7 @@ return d; #else /* Old, slow version */ - register int i; + int i; for (i = g->g_ndfas, d = g->g_dfa; --i >= 0; d++) { if (d->d_type == type) diff --git a/Parser/node.c b/Parser/node.c --- a/Parser/node.c +++ b/Parser/node.c @@ -76,7 +76,7 @@ int -PyNode_AddChild(register node *n1, int type, char *str, int lineno, int col_offset) +PyNode_AddChild(node *n1, int type, char *str, int lineno, int col_offset) { const int nch = n1->n_nchildren; int current_capacity; diff --git a/Parser/parser.c b/Parser/parser.c --- a/Parser/parser.c +++ b/Parser/parser.c @@ -35,9 +35,9 @@ #define s_empty(s) ((s)->s_top == &(s)->s_base[MAXSTACK]) static int -s_push(register stack *s, dfa *d, node *parent) +s_push(stack *s, dfa *d, node *parent) { - register stackentry *top; + stackentry *top; if (s->s_top == s->s_base) { fprintf(stderr, "s_push: parser stack overflow\n"); return E_NOMEM; @@ -52,7 +52,7 @@ #ifdef Py_DEBUG static void -s_pop(register stack *s) +s_pop(stack *s) { if (s_empty(s)) Py_FatalError("s_pop: parser stack underflow -- FATAL"); @@ -105,7 +105,7 @@ /* PARSER STACK OPERATIONS */ static int -shift(register stack *s, int type, char *str, int newstate, int lineno, int col_offset) +shift(stack *s, int type, char *str, int newstate, int lineno, int col_offset) { int err; assert(!s_empty(s)); @@ -117,10 +117,10 @@ } static int -push(register stack *s, int type, dfa *d, int newstate, int lineno, int col_offset) +push(stack *s, int type, dfa *d, int newstate, int lineno, int col_offset) { int err; - register node *n; + node *n; n = s->s_top->s_parent; assert(!s_empty(s)); err = PyNode_AddChild(n, type, (char *)NULL, lineno, col_offset); @@ -137,12 +137,12 @@ classify(parser_state *ps, int type, char *str) { grammar *g = ps->p_grammar; - register int n = g->g_ll.ll_nlabels; + int n = g->g_ll.ll_nlabels; if (type == NAME) { - register char *s = str; - register label *l = g->g_ll.ll_label; - register int i; + char *s = str; + label *l = g->g_ll.ll_label; + int i; for (i = n; i > 0; i--, l++) { if (l->lb_type != NAME || l->lb_str == NULL || l->lb_str[0] != s[0] || @@ -165,8 +165,8 @@ } { - register label *l = g->g_ll.ll_label; - register int i; + label *l = g->g_ll.ll_label; + int i; for (i = n; i > 0; i--, l++) { if (l->lb_type == type && l->lb_str == NULL) { D(printf("It's a token we know\n")); @@ -225,10 +225,10 @@ #endif /* future keyword */ int -PyParser_AddToken(register parser_state *ps, register int type, char *str, +PyParser_AddToken(parser_state *ps, int type, char *str, int lineno, int col_offset, int *expected_ret) { - register int ilabel; + int ilabel; int err; D(printf("Token %s/'%s' ... 
", _PyParser_TokenNames[type], str)); @@ -241,15 +241,15 @@ /* Loop until the token is shifted or an error occurred */ for (;;) { /* Fetch the current dfa and state */ - register dfa *d = ps->p_stack.s_top->s_dfa; - register state *s = &d->d_state[ps->p_stack.s_top->s_state]; + dfa *d = ps->p_stack.s_top->s_dfa; + state *s = &d->d_state[ps->p_stack.s_top->s_state]; D(printf(" DFA '%s', state %d:", d->d_name, ps->p_stack.s_top->s_state)); /* Check accelerator */ if (s->s_lower <= ilabel && ilabel < s->s_upper) { - register int x = s->s_accel[ilabel - s->s_lower]; + int x = s->s_accel[ilabel - s->s_lower]; if (x != -1) { if (x & (1<<7)) { /* Push non-terminal */ diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -874,7 +874,7 @@ /* Get next char, updating state; error code goes into tok->done */ static int -tok_nextc(register struct tok_state *tok) +tok_nextc(struct tok_state *tok) { for (;;) { if (tok->cur != tok->inp) { @@ -1071,7 +1071,7 @@ /* Back-up one character */ static void -tok_backup(register struct tok_state *tok, register int c) +tok_backup(struct tok_state *tok, int c) { if (c != EOF) { if (--tok->cur < tok->buf) @@ -1301,9 +1301,9 @@ /* Get next token, after space stripping etc. */ static int -tok_get(register struct tok_state *tok, char **p_start, char **p_end) +tok_get(struct tok_state *tok, char **p_start, char **p_end) { - register int c; + int c; int blankline, nonascii; *p_start = *p_end = NULL; @@ -1313,8 +1313,8 @@ /* Get indentation level */ if (tok->atbol) { - register int col = 0; - register int altcol = 0; + int col = 0; + int altcol = 0; tok->atbol = 0; for (;;) { c = tok_nextc(tok); diff --git a/Python/ceval.c b/Python/ceval.c --- a/Python/ceval.c +++ b/Python/ceval.c @@ -37,7 +37,7 @@ static void ppc_getcounter(uint64 *v) { - register unsigned long tbu, tb, tbu2; + unsigned long tbu, tb, tbu2; loop: asm volatile ("mftbu %0" : "=r" (tbu) ); @@ -792,12 +792,12 @@ #ifdef DXPAIRS int lastopcode = 0; #endif - register PyObject **stack_pointer; /* Next free slot in value stack */ - register unsigned char *next_instr; - register int opcode; /* Current opcode */ - register int oparg; /* Current opcode argument, if any */ - register enum why_code why; /* Reason for block stack unwind */ - register PyObject **fastlocals, **freevars; + PyObject **stack_pointer; /* Next free slot in value stack */ + unsigned char *next_instr; + int opcode; /* Current opcode */ + int oparg; /* Current opcode argument, if any */ + enum why_code why; /* Reason for block stack unwind */ + PyObject **fastlocals, **freevars; PyObject *retval = NULL; /* Return value */ PyThreadState *tstate = PyThreadState_GET(); PyCodeObject *co; @@ -3373,9 +3373,9 @@ PyObject **defs, int defcount, PyObject *kwdefs, PyObject *closure) { PyCodeObject* co = (PyCodeObject*)_co; - register PyFrameObject *f; - register PyObject *retval = NULL; - register PyObject **fastlocals, **freevars; + PyFrameObject *f; + PyObject *retval = NULL; + PyObject **fastlocals, **freevars; PyThreadState *tstate = PyThreadState_GET(); PyObject *x, *u; int total_args = co->co_argcount + co->co_kwonlyargcount; @@ -3895,7 +3895,7 @@ call_trace(Py_tracefunc func, PyObject *obj, PyFrameObject *frame, int what, PyObject *arg) { - register PyThreadState *tstate = frame->f_tstate; + PyThreadState *tstate = frame->f_tstate; int result; if (tstate->tracing) return 0; @@ -4581,7 +4581,7 @@ "BaseException is not allowed" static PyObject * -cmp_outcome(int op, register PyObject *v, register 
PyObject *w) +cmp_outcome(int op, PyObject *v, PyObject *w) { int res = 0; switch (op) { diff --git a/Python/codecs.c b/Python/codecs.c --- a/Python/codecs.c +++ b/Python/codecs.c @@ -53,7 +53,7 @@ static PyObject *normalizestring(const char *string) { - register size_t i; + size_t i; size_t len = strlen(string); char *p; PyObject *v; @@ -67,7 +67,7 @@ if (p == NULL) return PyErr_NoMemory(); for (i = 0; i < len; i++) { - register char ch = string[i]; + char ch = string[i]; if (ch == ' ') ch = '-'; else diff --git a/Python/dynload_aix.c b/Python/dynload_aix.c --- a/Python/dynload_aix.c +++ b/Python/dynload_aix.c @@ -30,11 +30,11 @@ static int aix_getoldmodules(void **modlistptr) { - register ModulePtr modptr, prevmodptr; - register struct ld_info *ldiptr; - register char *ldibuf; - register int errflag, bufsize = 1024; - register unsigned int offset; + ModulePtr modptr, prevmodptr; + struct ld_info *ldiptr; + char *ldibuf; + int errflag, bufsize = 1024; + unsigned int offset; char *progname = Py_GetProgramName(); /* @@ -106,7 +106,7 @@ char *message[1024], errbuf[1024]; PyObject *pathname_ob = NULL; PyObject *errbuf_ob = NULL; - register int i,j; + int i,j; struct errtab { int errNo; diff --git a/Python/marshal.c b/Python/marshal.c --- a/Python/marshal.c +++ b/Python/marshal.c @@ -613,7 +613,7 @@ static int r_short(RFILE *p) { - register short x; + short x; unsigned char buffer[2]; r_string((char *) buffer, 2, p); @@ -627,7 +627,7 @@ static long r_long(RFILE *p) { - register long x; + long x; unsigned char buffer[4]; r_string((char *) buffer, 4, p); diff --git a/Python/mystrtoul.c b/Python/mystrtoul.c --- a/Python/mystrtoul.c +++ b/Python/mystrtoul.c @@ -92,11 +92,11 @@ ** exceptions - we don't check for them. */ unsigned long -PyOS_strtoul(register char *str, char **ptr, int base) +PyOS_strtoul(char *str, char **ptr, int base) { - register unsigned long result = 0; /* return value of the function */ - register int c; /* current input character */ - register int ovlimit; /* required digits to overflow */ + unsigned long result = 0; /* return value of the function */ + int c; /* current input character */ + int ovlimit; /* required digits to overflow */ /* skip leading white space */ while (*str && Py_ISSPACE(Py_CHARMASK(*str))) @@ -213,7 +213,7 @@ if (ovlimit > 0) /* no overflow check required */ result = result * base + c; else { /* requires overflow check */ - register unsigned long temp_result; + unsigned long temp_result; if (ovlimit < 0) /* guaranteed overflow */ goto overflowed; diff --git a/Python/strdup.c b/Python/strdup.c --- a/Python/strdup.c +++ b/Python/strdup.c @@ -6,7 +6,7 @@ strdup(const char *str) { if (str != NULL) { - register char *copy = malloc(strlen(str) + 1); + char *copy = malloc(strlen(str) + 1); if (copy != NULL) return strcpy(copy, str); } diff --git a/Tools/unicode/makeunicodedata.py b/Tools/unicode/makeunicodedata.py --- a/Tools/unicode/makeunicodedata.py +++ b/Tools/unicode/makeunicodedata.py @@ -552,7 +552,7 @@ print("/* Returns 1 for Unicode characters having the bidirectional", file=fp) print(" * type 'WS', 'B' or 'S' or the category 'Zs', 0 otherwise.", file=fp) print(" */", file=fp) - print('int _PyUnicode_IsWhitespace(register const Py_UCS4 ch)', file=fp) + print('int _PyUnicode_IsWhitespace(const Py_UCS4 ch)', file=fp) print('{', file=fp) print(' switch (ch) {', file=fp) @@ -570,7 +570,7 @@ print(" * property 'BK', 'CR', 'LF' or 'NL' or having bidirectional", file=fp) print(" * type 'B', 0 otherwise.", file=fp) print(" */", file=fp) - print('int 
_PyUnicode_IsLinebreak(register const Py_UCS4 ch)', file=fp) + print('int _PyUnicode_IsLinebreak(const Py_UCS4 ch)', file=fp) print('{', file=fp) print(' switch (ch) {', file=fp) for codepoint in sorted(linebreaks): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 20:24:07 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 13 Aug 2013 20:24:07 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Add_NEWS_entry?= =?utf-8?q?_for_0f17aed78168_=28issue_=2316248=29?= Message-ID: <3cF2Pg08jRz7Ljx@mail.python.org> http://hg.python.org/cpython/rev/00bcf202cc3f changeset: 85158:00bcf202cc3f branch: 2.7 parent: 85154:49edf4cbc453 user: Antoine Pitrou date: Tue Aug 13 20:23:56 2013 +0200 summary: Add NEWS entry for 0f17aed78168 (issue #16248) files: Misc/NEWS | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,8 @@ Library ------- +- Fix tkinter regression introduced by the security fix in issue #16248. + - Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get docstrings and ValueError messages. Patch by Zhongyue Luo -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 22:26:12 2013 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 13 Aug 2013 22:26:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Remove_duplicate_text_in_3?= =?utf-8?q?=2E4_what=27s_new_=28my_bad=29?= Message-ID: <3cF56X6k05z7Ljh@mail.python.org> http://hg.python.org/cpython/rev/f07e4ebfca55 changeset: 85159:f07e4ebfca55 parent: 85157:e7f6cef7a4cc user: Antoine Pitrou date: Tue Aug 13 22:25:56 2013 +0200 summary: Remove duplicate text in 3.4 what's new (my bad) files: Doc/whatsnew/3.4.rst | 6 ------ 1 files changed, 0 insertions(+), 6 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -109,9 +109,6 @@ * SHA-3 (Keccak) support for :mod:`hashlib`. * TLSv1.1 and TLSv1.2 support for :mod:`ssl`. -* SHA-3 (Keccak) support for :mod:`hashlib`. -* TLSv1.1 and TLSv1.2 support for :mod:`ssl`. - Security improvements: * command line option for :ref:`isolated mode `, @@ -124,9 +121,6 @@ PEP 445: Add new APIs to customize Python memory allocators =========================================================== -PEP 445: Add new APIs to customize Python memory allocators -=========================================================== - The :pep:`445` adds new Application Programming Interfaces (API) to customize Python memory allocators. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 13 23:04:27 2013 From: python-checkins at python.org (brett.cannon) Date: Tue, 13 Aug 2013 23:04:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Fix_a_typo?= Message-ID: <3cF5yg1Qklz7LkR@mail.python.org> http://hg.python.org/peps/rev/6e7708f29c04 changeset: 5055:6e7708f29c04 user: Brett Cannon date: Tue Aug 13 17:04:22 2013 -0400 summary: Fix a typo files: pep-0448.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0448.txt b/pep-0448.txt --- a/pep-0448.txt +++ b/pep-0448.txt @@ -25,7 +25,7 @@ >>> print(*[1], *[2], 3) 1 2 3 - >>> dict(**{'x': 1}, y=3, **{'z': 2}) + >>> dict(**{'x': 1}, y=2, **{'z': 3}) {'x': 1, 'y': 2, 'z': 3} Function calls currently have the restriction that keyword arguments -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Aug 14 01:28:41 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 14 Aug 2013 01:28:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Close_=2312015=3A_The_temp?= =?utf-8?q?file_module_now_uses_a_suffix_of_8_random_characters?= Message-ID: <3cF99516lsz7Lkf@mail.python.org> http://hg.python.org/cpython/rev/de5077aca668 changeset: 85160:de5077aca668 user: Victor Stinner date: Wed Aug 14 01:28:28 2013 +0200 summary: Close #12015: The tempfile module now uses a suffix of 8 random characters instead of 6, to reduce the risk of filename collision. The entropy was reduced when uppercase letters were removed from the charset used to generate random characters. files: Lib/tempfile.py | 2 +- Lib/test/test_tempfile.py | 4 ++-- Misc/NEWS | 5 +++++ 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Lib/tempfile.py b/Lib/tempfile.py --- a/Lib/tempfile.py +++ b/Lib/tempfile.py @@ -125,7 +125,7 @@ def __next__(self): c = self.characters choose = self.rng.choice - letters = [choose(c) for dummy in "123456"] + letters = [choose(c) for dummy in range(8)] return ''.join(letters) def _candidate_tempdir_list(): diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py --- a/Lib/test/test_tempfile.py +++ b/Lib/test/test_tempfile.py @@ -35,7 +35,7 @@ # Common functionality. class BaseTestCase(unittest.TestCase): - str_check = re.compile(r"[a-zA-Z0-9_-]{6}$") + str_check = re.compile(r"^[a-z0-9_-]{8}$") def setUp(self): self._warnings_manager = support.check_warnings() @@ -62,7 +62,7 @@ nbase = nbase[len(pre):len(nbase)-len(suf)] self.assertTrue(self.str_check.match(nbase), - "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/" + "random string '%s' does not match ^[a-z0-9_-]{8}$" % nbase) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,11 @@ Library ------- +- Issue #12015: The tempfile module now uses a suffix of 8 random characters + instead of 6, to reduce the risk of filename collision. The entropy was + reduced when uppercase letters were removed from the charset used to generate + random characters. + - Issue #18585: Add :func:`textwrap.shorten` to collapse and truncate a piece of text to a given length. 
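For scale, going from 6 to 8 characters more than recovers the entropy lost when upper-case letters were dropped from the charset. A back-of-the-envelope check follows; the alphabet sizes are assumptions (roughly 63 symbols for the old mixed-case set and 37 for the current lower-case set in Lib/tempfile.py), and the relative change is the point, not the exact figures.

    # Rough entropy comparison for issue #12015; alphabet sizes are
    # approximate assumptions.
    import math

    def bits(alphabet_size, length):
        """Bits of randomness in a random name of `length` symbols."""
        return length * math.log2(alphabet_size)

    print("old, mixed case, 6 chars: %.1f bits" % bits(63, 6))  # ~35.9
    print("lower case only, 6 chars: %.1f bits" % bits(37, 6))  # ~31.3
    print("lower case only, 8 chars: %.1f bits" % bits(37, 8))  # ~41.7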
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 01:40:58 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 14 Aug 2013 01:40:58 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NDA1?= =?utf-8?q?=3A_Improve_the_entropy_of_crypt=2Emksalt=28=29=2E?= Message-ID: <3cF9RG1z4mz7Lkf@mail.python.org> http://hg.python.org/cpython/rev/e8a314fe248b changeset: 85161:e8a314fe248b branch: 3.3 parent: 85155:49fa63610c7f user: Victor Stinner date: Wed Aug 14 01:39:14 2013 +0200 summary: Issue #18405: Improve the entropy of crypt.mksalt(). files: Lib/crypt.py | 2 +- Misc/NEWS | 2 ++ 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Lib/crypt.py b/Lib/crypt.py --- a/Lib/crypt.py +++ b/Lib/crypt.py @@ -28,7 +28,7 @@ if method is None: method = methods[0] s = '${}$'.format(method.ident) if method.ident else '' - s += ''.join(_sr.sample(_saltchars, method.salt_chars)) + s += ''.join(_sr.choice(_saltchars) for char in range(method.salt_chars)) return s diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,8 @@ Library ------- +- Issue #18405: Improve the entropy of crypt.mksalt(). + - Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get docstrings and ValueError messages. Patch by Zhongyue Luo -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 01:40:59 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 14 Aug 2013 01:40:59 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_=28Merge_3=2E3=29_Issue_=2318405=3A_Improve_the_entropy_?= =?utf-8?b?b2YgY3J5cHQubWtzYWx0KCku?= Message-ID: <3cF9RH3x28z7Lkk@mail.python.org> http://hg.python.org/cpython/rev/122e074c56f7 changeset: 85162:122e074c56f7 parent: 85160:de5077aca668 parent: 85161:e8a314fe248b user: Victor Stinner date: Wed Aug 14 01:40:46 2013 +0200 summary: (Merge 3.3) Issue #18405: Improve the entropy of crypt.mksalt(). files: Lib/crypt.py | 2 +- Misc/NEWS | 2 ++ 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Lib/crypt.py b/Lib/crypt.py --- a/Lib/crypt.py +++ b/Lib/crypt.py @@ -28,7 +28,7 @@ if method is None: method = methods[0] s = '${}$'.format(method.ident) if method.ident else '' - s += ''.join(_sr.sample(_saltchars, method.salt_chars)) + s += ''.join(_sr.choice(_saltchars) for char in range(method.salt_chars)) return s diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,8 @@ Library ------- +- Issue #18405: Improve the entropy of crypt.mksalt(). + - Issue #12015: The tempfile module now uses a suffix of 8 random characters instead of 6, to reduce the risk of filename collision. The entropy was reduced when uppercase letters were removed from the charset used to generate -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 01:52:20 2013 From: python-checkins at python.org (terry.reedy) Date: Wed, 14 Aug 2013 01:52:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NDI1?= =?utf-8?q?=3A_Add_docstrings_to_IdleHistory=2Epy=2E__Remove_redundant_=27?= =?utf-8?b?aGlzdG9yeV8n?= Message-ID: <3cF9hN1sFYz7Lk5@mail.python.org> http://hg.python.org/cpython/rev/0bb9346665e9 changeset: 85163:0bb9346665e9 branch: 3.3 parent: 85161:e8a314fe248b user: Terry Jan Reedy date: Tue Aug 13 19:51:04 2013 -0400 summary: Issue #18425: Add docstrings to IdleHistory.py. 
Remove redundant 'history_' prefix from two attributes and two methods of History class. files: Lib/idlelib/IdleHistory.py | 48 ++++++++++++++++++------- Lib/idlelib/PyShell.py | 2 +- 2 files changed, 35 insertions(+), 15 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -1,23 +1,41 @@ +"Implement Idle Shell history mechanism with History class" + from idlelib.configHandler import idleConf class History: + ''' Implement Idle Shell history mechanism. + store - Store source statement (called from PyShell.resetoutput). + fetch - Fetch stored statement matching prefix already entered. + history_next - Bound to <> event (default Alt-N). + history_prev - Bound to <> event (default Alt-P). + ''' def __init__(self, text, output_sep = "\n"): + '''Initialize data attributes and bind event methods. + + .text - Idle wrapper of tk Text widget, with .bell(). + .history - source statements, possibly with multiple lines. + .prefix - source already entered at prompt; filters history list. + .pointer - index into history. + .cyclic - wrap around history list (or not). + ''' self.text = text self.history = [] - self.history_prefix = None - self.history_pointer = None + self.prefix = None + self.pointer = None self.output_sep = output_sep self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool") text.bind("<>", self.history_prev) text.bind("<>", self.history_next) def history_next(self, event): - self.history_do(0) + "Fetch later statement; start with ealiest if cyclic." + self.fetch(reverse=False) return "break" def history_prev(self, event): - self.history_do(1) + "Fetch earlier statement; start with most recent." + self.fetch(reverse=True) return "break" def _get_source(self, start, end): @@ -30,10 +48,11 @@ output = self.output_sep.join(source.split("\n")) self.text.insert(where, output) - def history_do(self, reverse): + def fetch(self, reverse): + "Fetch statememt and enter into text at cursor." nhist = len(self.history) - pointer = self.history_pointer - prefix = self.history_prefix + pointer = self.pointer + prefix = self.prefix if pointer is not None and prefix is not None: if self.text.compare("insert", "!=", "end-1c") or \ self._get_source("iomark", "end-1c") != self.history[pointer]: @@ -41,10 +60,10 @@ if pointer is None or prefix is None: prefix = self._get_source("iomark", "end-1c") if reverse: - pointer = nhist + pointer = nhist # will be decremented else: if self.cyclic: - pointer = -1 + pointer = -1 # will be incremented else: self.text.bell() return @@ -72,10 +91,11 @@ self.text.mark_set("insert", "end-1c") self.text.see("insert") self.text.tag_remove("sel", "1.0", "end") - self.history_pointer = pointer - self.history_prefix = prefix + self.pointer = pointer + self.prefix = prefix - def history_store(self, source): + def store(self, source): + "Store Shell input statement into history list." 
source = source.strip() if len(source) > 2: # avoid duplicates @@ -84,5 +104,5 @@ except ValueError: pass self.history.append(source) - self.history_pointer = None - self.history_prefix = None + self.pointer = None + self.prefix = None diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -1261,7 +1261,7 @@ def resetoutput(self): source = self.text.get("iomark", "end-1c") if self.history: - self.history.history_store(source) + self.history.store(source) if self.text.get("end-2c") != "\n": self.text.insert("end-1c", "\n") self.text.mark_set("iomark", "end-1c") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 01:52:21 2013 From: python-checkins at python.org (terry.reedy) Date: Wed, 14 Aug 2013 01:52:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3cF9hP4kJMz7LkR@mail.python.org> http://hg.python.org/cpython/rev/b0cc8c9ab912 changeset: 85164:b0cc8c9ab912 parent: 85162:122e074c56f7 parent: 85163:0bb9346665e9 user: Terry Jan Reedy date: Tue Aug 13 19:51:29 2013 -0400 summary: Merge with 3.3 files: Lib/idlelib/IdleHistory.py | 48 ++++++++++++++++++------- Lib/idlelib/PyShell.py | 2 +- 2 files changed, 35 insertions(+), 15 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -1,23 +1,41 @@ +"Implement Idle Shell history mechanism with History class" + from idlelib.configHandler import idleConf class History: + ''' Implement Idle Shell history mechanism. + store - Store source statement (called from PyShell.resetoutput). + fetch - Fetch stored statement matching prefix already entered. + history_next - Bound to <> event (default Alt-N). + history_prev - Bound to <> event (default Alt-P). + ''' def __init__(self, text, output_sep = "\n"): + '''Initialize data attributes and bind event methods. + + .text - Idle wrapper of tk Text widget, with .bell(). + .history - source statements, possibly with multiple lines. + .prefix - source already entered at prompt; filters history list. + .pointer - index into history. + .cyclic - wrap around history list (or not). + ''' self.text = text self.history = [] - self.history_prefix = None - self.history_pointer = None + self.prefix = None + self.pointer = None self.output_sep = output_sep self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool") text.bind("<>", self.history_prev) text.bind("<>", self.history_next) def history_next(self, event): - self.history_do(0) + "Fetch later statement; start with ealiest if cyclic." + self.fetch(reverse=False) return "break" def history_prev(self, event): - self.history_do(1) + "Fetch earlier statement; start with most recent." + self.fetch(reverse=True) return "break" def _get_source(self, start, end): @@ -30,10 +48,11 @@ output = self.output_sep.join(source.split("\n")) self.text.insert(where, output) - def history_do(self, reverse): + def fetch(self, reverse): + "Fetch statememt and enter into text at cursor." 
nhist = len(self.history) - pointer = self.history_pointer - prefix = self.history_prefix + pointer = self.pointer + prefix = self.prefix if pointer is not None and prefix is not None: if self.text.compare("insert", "!=", "end-1c") or \ self._get_source("iomark", "end-1c") != self.history[pointer]: @@ -41,10 +60,10 @@ if pointer is None or prefix is None: prefix = self._get_source("iomark", "end-1c") if reverse: - pointer = nhist + pointer = nhist # will be decremented else: if self.cyclic: - pointer = -1 + pointer = -1 # will be incremented else: self.text.bell() return @@ -72,10 +91,11 @@ self.text.mark_set("insert", "end-1c") self.text.see("insert") self.text.tag_remove("sel", "1.0", "end") - self.history_pointer = pointer - self.history_prefix = prefix + self.pointer = pointer + self.prefix = prefix - def history_store(self, source): + def store(self, source): + "Store Shell input statement into history list." source = source.strip() if len(source) > 2: # avoid duplicates @@ -84,5 +104,5 @@ except ValueError: pass self.history.append(source) - self.history_pointer = None - self.history_prefix = None + self.pointer = None + self.prefix = None diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -1264,7 +1264,7 @@ def resetoutput(self): source = self.text.get("iomark", "end-1c") if self.history: - self.history.history_store(source) + self.history.store(source) if self.text.get("end-2c") != "\n": self.text.insert("end-1c", "\n") self.text.mark_set("iomark", "end-1c") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 01:52:23 2013 From: python-checkins at python.org (terry.reedy) Date: Wed, 14 Aug 2013 01:52:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NDI1?= =?utf-8?q?=3A_Add_docstrings_to_IdleHistory=2Epy=2E__Remove_redundant_=27?= =?utf-8?b?aGlzdG9yeV8n?= Message-ID: <3cF9hR0bJKz7LkK@mail.python.org> http://hg.python.org/cpython/rev/22d7c755163d changeset: 85165:22d7c755163d branch: 2.7 parent: 85158:00bcf202cc3f user: Terry Jan Reedy date: Tue Aug 13 19:51:04 2013 -0400 summary: Issue #18425: Add docstrings to IdleHistory.py. Remove redundant 'history_' prefix from two attributes and two methods of History class. files: Lib/idlelib/IdleHistory.py | 48 ++++++++++++++++++------- Lib/idlelib/PyShell.py | 2 +- 2 files changed, 35 insertions(+), 15 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -1,23 +1,41 @@ +"Implement Idle Shell history mechanism with History class" + from idlelib.configHandler import idleConf class History: + ''' Implement Idle Shell history mechanism. + store - Store source statement (called from PyShell.resetoutput). + fetch - Fetch stored statement matching prefix already entered. + history_next - Bound to <> event (default Alt-N). + history_prev - Bound to <> event (default Alt-P). + ''' def __init__(self, text, output_sep = "\n"): + '''Initialize data attributes and bind event methods. + + .text - Idle wrapper of tk Text widget, with .bell(). + .history - source statements, possibly with multiple lines. + .prefix - source already entered at prompt; filters history list. + .pointer - index into history. + .cyclic - wrap around history list (or not). 
+ ''' self.text = text self.history = [] - self.history_prefix = None - self.history_pointer = None + self.prefix = None + self.pointer = None self.output_sep = output_sep self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool") text.bind("<>", self.history_prev) text.bind("<>", self.history_next) def history_next(self, event): - self.history_do(0) + "Fetch later statement; start with ealiest if cyclic." + self.fetch(reverse=False) return "break" def history_prev(self, event): - self.history_do(1) + "Fetch earlier statement; start with most recent." + self.fetch(reverse=True) return "break" def _get_source(self, start, end): @@ -30,10 +48,11 @@ output = self.output_sep.join(source.split("\n")) self.text.insert(where, output) - def history_do(self, reverse): + def fetch(self, reverse): + "Fetch statememt and enter into text at cursor." nhist = len(self.history) - pointer = self.history_pointer - prefix = self.history_prefix + pointer = self.pointer + prefix = self.prefix if pointer is not None and prefix is not None: if self.text.compare("insert", "!=", "end-1c") or \ self._get_source("iomark", "end-1c") != self.history[pointer]: @@ -41,10 +60,10 @@ if pointer is None or prefix is None: prefix = self._get_source("iomark", "end-1c") if reverse: - pointer = nhist + pointer = nhist # will be decremented else: if self.cyclic: - pointer = -1 + pointer = -1 # will be incremented else: self.text.bell() return @@ -72,10 +91,11 @@ self.text.mark_set("insert", "end-1c") self.text.see("insert") self.text.tag_remove("sel", "1.0", "end") - self.history_pointer = pointer - self.history_prefix = prefix + self.pointer = pointer + self.prefix = prefix - def history_store(self, source): + def store(self, source): + "Store Shell input statement into history list." source = source.strip() if len(source) > 2: # avoid duplicates @@ -84,5 +104,5 @@ except ValueError: pass self.history.append(source) - self.history_pointer = None - self.history_prefix = None + self.pointer = None + self.prefix = None diff --git a/Lib/idlelib/PyShell.py b/Lib/idlelib/PyShell.py --- a/Lib/idlelib/PyShell.py +++ b/Lib/idlelib/PyShell.py @@ -1278,7 +1278,7 @@ def resetoutput(self): source = self.text.get("iomark", "end-1c") if self.history: - self.history.history_store(source) + self.history.store(source) if self.text.get("end-2c") != "\n": self.text.insert("end-1c", "\n") self.text.mark_set("iomark", "end-1c") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 03:21:14 2013 From: python-checkins at python.org (raymond.hettinger) Date: Wed, 14 Aug 2013 03:21:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgMTg3MTk6?= =?utf-8?q?_Remove_a_false_optimization?= Message-ID: <3cFCfy4T3Mz7LkF@mail.python.org> http://hg.python.org/cpython/rev/8f9bc9283400 changeset: 85166:8f9bc9283400 branch: 3.3 parent: 85163:0bb9346665e9 user: Raymond Hettinger date: Tue Aug 13 18:16:34 2013 -0700 summary: Issue 18719: Remove a false optimization Remove an unused early-out test from the critical path for dict and set lookups. When the strings already have matching lengths, kinds, and hashes, there is no additional information gained by checking the first characters (the probability of a mismatch is already known to be less than 1 in 2**64). 
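
The reasoning above can be illustrated at the Python level with a small, purely illustrative sketch (a hypothetical Key class, not the C code in eq.h): a dictionary only falls back to full equality once the hashes already agree, so an extra first-character check establishes nothing that the hash match has not already made overwhelmingly likely.

    # Hypothetical Key class used only to make the calls observable.
    # CPython's dict lookup compares hashes (or object identity) before it
    # ever calls __eq__, so the full comparison runs only after a hash match.
    class Key:
        eq_calls = 0
        def __init__(self, value):
            self.value = value
        def __hash__(self):
            return hash(self.value)
        def __eq__(self, other):
            Key.eq_calls += 1
            return self.value == other.value

    d = {Key("spam"): 1, Key("eggs"): 2}

    Key.eq_calls = 0
    d[Key("spam")]      # hash matches a stored key -> exactly one __eq__ call
    assert Key.eq_calls == 1

    Key.eq_calls = 0
    Key("ham") in d     # no stored key with this hash -> __eq__ never called
    assert Key.eq_calls == 0   # barring an astronomically unlikely collision

Once the hashes (and, for str keys, the lengths and kinds) match, the only work left is the memcmp over the full contents, which is exactly what the patched function now does directly.
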
files: Objects/stringlib/eq.h | 9 --------- 1 files changed, 0 insertions(+), 9 deletions(-) diff --git a/Objects/stringlib/eq.h b/Objects/stringlib/eq.h --- a/Objects/stringlib/eq.h +++ b/Objects/stringlib/eq.h @@ -20,15 +20,6 @@ return 1; if (PyUnicode_KIND(a) != PyUnicode_KIND(b)) return 0; - /* Just comparing the first byte is enough to see if a and b differ. - * If they are 2 byte or 4 byte character most differences will happen in - * the lower bytes anyways. - */ - if (PyUnicode_1BYTE_DATA(a)[0] != PyUnicode_1BYTE_DATA(b)[0]) - return 0; - if (PyUnicode_KIND(a) == PyUnicode_1BYTE_KIND && - PyUnicode_GET_LENGTH(a) == 1) - return 1; return memcmp(PyUnicode_1BYTE_DATA(a), PyUnicode_1BYTE_DATA(b), PyUnicode_GET_LENGTH(a) * PyUnicode_KIND(a)) == 0; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 03:21:15 2013 From: python-checkins at python.org (raymond.hettinger) Date: Wed, 14 Aug 2013 03:21:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_merge?= Message-ID: <3cFCfz6N6sz7Lkc@mail.python.org> http://hg.python.org/cpython/rev/55c55bfe237b changeset: 85167:55c55bfe237b parent: 85164:b0cc8c9ab912 parent: 85166:8f9bc9283400 user: Raymond Hettinger date: Tue Aug 13 18:20:55 2013 -0700 summary: merge files: Objects/stringlib/eq.h | 9 --------- 1 files changed, 0 insertions(+), 9 deletions(-) diff --git a/Objects/stringlib/eq.h b/Objects/stringlib/eq.h --- a/Objects/stringlib/eq.h +++ b/Objects/stringlib/eq.h @@ -20,15 +20,6 @@ return 1; if (PyUnicode_KIND(a) != PyUnicode_KIND(b)) return 0; - /* Just comparing the first byte is enough to see if a and b differ. - * If they are 2 byte or 4 byte character most differences will happen in - * the lower bytes anyways. - */ - if (PyUnicode_1BYTE_DATA(a)[0] != PyUnicode_1BYTE_DATA(b)[0]) - return 0; - if (PyUnicode_KIND(a) == PyUnicode_1BYTE_KIND && - PyUnicode_GET_LENGTH(a) == 1) - return 1; return memcmp(PyUnicode_1BYTE_DATA(a), PyUnicode_1BYTE_DATA(b), PyUnicode_GET_LENGTH(a) * PyUnicode_KIND(a)) == 0; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 03:35:13 2013 From: python-checkins at python.org (raymond.hettinger) Date: Wed, 14 Aug 2013 03:35:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgMTg3MTk6?= =?utf-8?q?_Remove_a_false_optimization?= Message-ID: <3cFCz51MrKz7LlJ@mail.python.org> http://hg.python.org/cpython/rev/ac2f59a6637f changeset: 85168:ac2f59a6637f branch: 2.7 parent: 85165:22d7c755163d user: Raymond Hettinger date: Tue Aug 13 18:34:49 2013 -0700 summary: Issue 18719: Remove a false optimization Remove an unused early-out test from the critical path for dict and set lookups. When the strings already have matching lengths and hashes, there is no additional information gained by checking the first characters (the probability of a mismatch is already known to be less than 1 in 2**64). 
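
For anyone wanting to confirm that dropping the early-out costs nothing measurable, a micro-benchmark along these lines (illustrative only; key names and sizes are arbitrary) exercises the string-comparison path of dict lookup and can be run against interpreters built with and without the check:

    # Probe keys are equal to the stored keys but are distinct objects, so
    # lookups cannot take the identity shortcut and must run the string
    # equality code this change touches.
    import timeit
    timer = timeit.default_timer   # works on both 2.7 and 3.x

    keys   = ['key%06d' % i for i in range(10000)]
    probes = ['key%06d' % i for i in range(10000)]
    d = dict.fromkeys(keys, 0)

    start = timer()
    for _ in range(200):
        for p in probes:
            d[p]
    print('%.3f seconds' % (timer() - start))
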
files: Objects/stringobject.c | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Objects/stringobject.c b/Objects/stringobject.c --- a/Objects/stringobject.c +++ b/Objects/stringobject.c @@ -1255,7 +1255,6 @@ PyStringObject *a = (PyStringObject*) o1; PyStringObject *b = (PyStringObject*) o2; return Py_SIZE(a) == Py_SIZE(b) - && *a->ob_sval == *b->ob_sval && memcmp(a->ob_sval, b->ob_sval, Py_SIZE(a)) == 0; } -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Wed Aug 14 05:54:30 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 14 Aug 2013 05:54:30 +0200 Subject: [Python-checkins] Daily reference leaks (55c55bfe237b): sum=0 Message-ID: results for 55c55bfe237b on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogs0shXc', '-x'] From python-checkins at python.org Wed Aug 14 16:49:01 2013 From: python-checkins at python.org (richard.oudkerk) Date: Wed, 14 Aug 2013 16:49:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=238713=3A_Support_a?= =?utf-8?q?lternative_start_methods_in_multiprocessing_on_Unix=2E?= Message-ID: <3cFYb16pbLz7Lk8@mail.python.org> http://hg.python.org/cpython/rev/3b82e0d83bf9 changeset: 85169:3b82e0d83bf9 parent: 85167:55c55bfe237b user: Richard Oudkerk date: Wed Aug 14 15:35:41 2013 +0100 summary: Issue #8713: Support alternative start methods in multiprocessing on Unix. See http://hg.python.org/sandbox/sbt#spawn files: Doc/includes/mp_benchmarks.py | 239 ---- Doc/includes/mp_newtype.py | 14 +- Doc/includes/mp_pool.py | 337 +----- Doc/includes/mp_synchronize.py | 278 ----- Doc/includes/mp_webserver.py | 70 - Doc/includes/mp_workers.py | 13 - Doc/library/multiprocessing.rst | 246 +++- Doc/whatsnew/3.4.rst | 13 + Lib/multiprocessing/__init__.py | 107 +- Lib/multiprocessing/connection.py | 61 +- Lib/multiprocessing/dummy/__init__.py | 4 +- Lib/multiprocessing/forking.py | 477 --------- Lib/multiprocessing/forkserver.py | 238 ++++ Lib/multiprocessing/heap.py | 56 +- Lib/multiprocessing/managers.py | 44 +- Lib/multiprocessing/pool.py | 84 +- Lib/multiprocessing/popen.py | 78 + Lib/multiprocessing/popen_fork.py | 87 + Lib/multiprocessing/popen_forkserver.py | 75 + Lib/multiprocessing/popen_spawn_posix.py | 75 + Lib/multiprocessing/popen_spawn_win32.py | 102 ++ Lib/multiprocessing/process.py | 60 +- Lib/multiprocessing/queues.py | 36 +- Lib/multiprocessing/reduction.py | 361 +++---- Lib/multiprocessing/resource_sharer.py | 158 +++ Lib/multiprocessing/semaphore_tracker.py | 135 ++ Lib/multiprocessing/sharedctypes.py | 7 +- Lib/multiprocessing/spawn.py | 258 +++++ Lib/multiprocessing/synchronize.py | 73 +- Lib/multiprocessing/util.py | 70 +- Lib/test/test_multiprocessing.py | 495 ++++++--- Lib/test/mp_fork_bomb.py | 5 + Lib/test/regrtest.py | 2 +- Lib/test/test_multiprocessing_fork.py | 7 + Lib/test/test_multiprocessing_forkserver.py | 7 + Lib/test/test_multiprocessing_spawn.py | 7 + Makefile.pre.in | 4 +- Modules/_multiprocessing/multiprocessing.c | 1 + Modules/_multiprocessing/multiprocessing.h | 1 + Modules/_multiprocessing/semaphore.c | 78 +- 40 files changed, 2442 insertions(+), 2021 deletions(-) diff --git a/Doc/includes/mp_benchmarks.py b/Doc/includes/mp_benchmarks.py deleted file mode 100644 --- a/Doc/includes/mp_benchmarks.py +++ /dev/null @@ -1,239 +0,0 @@ -# -# Simple benchmarks for the multiprocessing package -# -# Copyright (c) 2006-2008, R Oudkerk -# All rights 
reserved. -# - -import time -import multiprocessing -import threading -import queue -import gc - -_timer = time.perf_counter - -delta = 1 - - -#### TEST_QUEUESPEED - -def queuespeed_func(q, c, iterations): - a = '0' * 256 - c.acquire() - c.notify() - c.release() - - for i in range(iterations): - q.put(a) - - q.put('STOP') - -def test_queuespeed(Process, q, c): - elapsed = 0 - iterations = 1 - - while elapsed < delta: - iterations *= 2 - - p = Process(target=queuespeed_func, args=(q, c, iterations)) - c.acquire() - p.start() - c.wait() - c.release() - - result = None - t = _timer() - - while result != 'STOP': - result = q.get() - - elapsed = _timer() - t - - p.join() - - print(iterations, 'objects passed through the queue in', elapsed, 'seconds') - print('average number/sec:', iterations/elapsed) - - -#### TEST_PIPESPEED - -def pipe_func(c, cond, iterations): - a = '0' * 256 - cond.acquire() - cond.notify() - cond.release() - - for i in range(iterations): - c.send(a) - - c.send('STOP') - -def test_pipespeed(): - c, d = multiprocessing.Pipe() - cond = multiprocessing.Condition() - elapsed = 0 - iterations = 1 - - while elapsed < delta: - iterations *= 2 - - p = multiprocessing.Process(target=pipe_func, - args=(d, cond, iterations)) - cond.acquire() - p.start() - cond.wait() - cond.release() - - result = None - t = _timer() - - while result != 'STOP': - result = c.recv() - - elapsed = _timer() - t - p.join() - - print(iterations, 'objects passed through connection in',elapsed,'seconds') - print('average number/sec:', iterations/elapsed) - - -#### TEST_SEQSPEED - -def test_seqspeed(seq): - elapsed = 0 - iterations = 1 - - while elapsed < delta: - iterations *= 2 - - t = _timer() - - for i in range(iterations): - a = seq[5] - - elapsed = _timer() - t - - print(iterations, 'iterations in', elapsed, 'seconds') - print('average number/sec:', iterations/elapsed) - - -#### TEST_LOCK - -def test_lockspeed(l): - elapsed = 0 - iterations = 1 - - while elapsed < delta: - iterations *= 2 - - t = _timer() - - for i in range(iterations): - l.acquire() - l.release() - - elapsed = _timer() - t - - print(iterations, 'iterations in', elapsed, 'seconds') - print('average number/sec:', iterations/elapsed) - - -#### TEST_CONDITION - -def conditionspeed_func(c, N): - c.acquire() - c.notify() - - for i in range(N): - c.wait() - c.notify() - - c.release() - -def test_conditionspeed(Process, c): - elapsed = 0 - iterations = 1 - - while elapsed < delta: - iterations *= 2 - - c.acquire() - p = Process(target=conditionspeed_func, args=(c, iterations)) - p.start() - - c.wait() - - t = _timer() - - for i in range(iterations): - c.notify() - c.wait() - - elapsed = _timer() - t - - c.release() - p.join() - - print(iterations * 2, 'waits in', elapsed, 'seconds') - print('average number/sec:', iterations * 2 / elapsed) - -#### - -def test(): - manager = multiprocessing.Manager() - - gc.disable() - - print('\n\t######## testing Queue.Queue\n') - test_queuespeed(threading.Thread, queue.Queue(), - threading.Condition()) - print('\n\t######## testing multiprocessing.Queue\n') - test_queuespeed(multiprocessing.Process, multiprocessing.Queue(), - multiprocessing.Condition()) - print('\n\t######## testing Queue managed by server process\n') - test_queuespeed(multiprocessing.Process, manager.Queue(), - manager.Condition()) - print('\n\t######## testing multiprocessing.Pipe\n') - test_pipespeed() - - print() - - print('\n\t######## testing list\n') - test_seqspeed(list(range(10))) - print('\n\t######## testing list managed by server 
process\n') - test_seqspeed(manager.list(list(range(10)))) - print('\n\t######## testing Array("i", ..., lock=False)\n') - test_seqspeed(multiprocessing.Array('i', list(range(10)), lock=False)) - print('\n\t######## testing Array("i", ..., lock=True)\n') - test_seqspeed(multiprocessing.Array('i', list(range(10)), lock=True)) - - print() - - print('\n\t######## testing threading.Lock\n') - test_lockspeed(threading.Lock()) - print('\n\t######## testing threading.RLock\n') - test_lockspeed(threading.RLock()) - print('\n\t######## testing multiprocessing.Lock\n') - test_lockspeed(multiprocessing.Lock()) - print('\n\t######## testing multiprocessing.RLock\n') - test_lockspeed(multiprocessing.RLock()) - print('\n\t######## testing lock managed by server process\n') - test_lockspeed(manager.Lock()) - print('\n\t######## testing rlock managed by server process\n') - test_lockspeed(manager.RLock()) - - print() - - print('\n\t######## testing threading.Condition\n') - test_conditionspeed(threading.Thread, threading.Condition()) - print('\n\t######## testing multiprocessing.Condition\n') - test_conditionspeed(multiprocessing.Process, multiprocessing.Condition()) - print('\n\t######## testing condition managed by a server process\n') - test_conditionspeed(multiprocessing.Process, manager.Condition()) - - gc.enable() - -if __name__ == '__main__': - multiprocessing.freeze_support() - test() diff --git a/Doc/includes/mp_newtype.py b/Doc/includes/mp_newtype.py --- a/Doc/includes/mp_newtype.py +++ b/Doc/includes/mp_newtype.py @@ -1,11 +1,3 @@ -# -# This module shows how to use arbitrary callables with a subclass of -# `BaseManager`. -# -# Copyright (c) 2006-2008, R Oudkerk -# All rights reserved. -# - from multiprocessing import freeze_support from multiprocessing.managers import BaseManager, BaseProxy import operator @@ -27,12 +19,10 @@ # Proxy type for generator objects class GeneratorProxy(BaseProxy): - _exposed_ = ('next', '__next__') + _exposed_ = ['__next__'] def __iter__(self): return self def __next__(self): - return self._callmethod('next') - def __next__(self): return self._callmethod('__next__') # Function to return the operator module @@ -90,8 +80,6 @@ op = manager.operator() print('op.add(23, 45) =', op.add(23, 45)) print('op.pow(2, 94) =', op.pow(2, 94)) - print('op.getslice(range(10), 2, 6) =', op.getslice(list(range(10)), 2, 6)) - print('op.repeat(range(5), 3) =', op.repeat(list(range(5)), 3)) print('op._exposed_ =', op._exposed_) ## diff --git a/Doc/includes/mp_pool.py b/Doc/includes/mp_pool.py --- a/Doc/includes/mp_pool.py +++ b/Doc/includes/mp_pool.py @@ -1,10 +1,3 @@ -# -# A test of `multiprocessing.Pool` class -# -# Copyright (c) 2006-2008, R Oudkerk -# All rights reserved. 
-# - import multiprocessing import time import random @@ -46,269 +39,115 @@ # def test(): - print('cpu_count() = %d\n' % multiprocessing.cpu_count()) - - # - # Create pool - # - PROCESSES = 4 print('Creating pool with %d processes\n' % PROCESSES) - pool = multiprocessing.Pool(PROCESSES) - print('pool = %s' % pool) - print() - # - # Tests - # + with multiprocessing.Pool(PROCESSES) as pool: + # + # Tests + # - TASKS = [(mul, (i, 7)) for i in range(10)] + \ - [(plus, (i, 8)) for i in range(10)] + TASKS = [(mul, (i, 7)) for i in range(10)] + \ + [(plus, (i, 8)) for i in range(10)] - results = [pool.apply_async(calculate, t) for t in TASKS] - imap_it = pool.imap(calculatestar, TASKS) - imap_unordered_it = pool.imap_unordered(calculatestar, TASKS) + results = [pool.apply_async(calculate, t) for t in TASKS] + imap_it = pool.imap(calculatestar, TASKS) + imap_unordered_it = pool.imap_unordered(calculatestar, TASKS) - print('Ordered results using pool.apply_async():') - for r in results: - print('\t', r.get()) - print() + print('Ordered results using pool.apply_async():') + for r in results: + print('\t', r.get()) + print() - print('Ordered results using pool.imap():') - for x in imap_it: - print('\t', x) - print() + print('Ordered results using pool.imap():') + for x in imap_it: + print('\t', x) + print() - print('Unordered results using pool.imap_unordered():') - for x in imap_unordered_it: - print('\t', x) - print() + print('Unordered results using pool.imap_unordered():') + for x in imap_unordered_it: + print('\t', x) + print() - print('Ordered results using pool.map() --- will block till complete:') - for x in pool.map(calculatestar, TASKS): - print('\t', x) - print() + print('Ordered results using pool.map() --- will block till complete:') + for x in pool.map(calculatestar, TASKS): + print('\t', x) + print() - # - # Simple benchmarks - # + # + # Test error handling + # - N = 100000 - print('def pow3(x): return x**3') + print('Testing error handling:') - t = time.time() - A = list(map(pow3, range(N))) - print('\tmap(pow3, range(%d)):\n\t\t%s seconds' % \ - (N, time.time() - t)) + try: + print(pool.apply(f, (5,))) + except ZeroDivisionError: + print('\tGot ZeroDivisionError as expected from pool.apply()') + else: + raise AssertionError('expected ZeroDivisionError') - t = time.time() - B = pool.map(pow3, range(N)) - print('\tpool.map(pow3, range(%d)):\n\t\t%s seconds' % \ - (N, time.time() - t)) + try: + print(pool.map(f, list(range(10)))) + except ZeroDivisionError: + print('\tGot ZeroDivisionError as expected from pool.map()') + else: + raise AssertionError('expected ZeroDivisionError') - t = time.time() - C = list(pool.imap(pow3, range(N), chunksize=N//8)) - print('\tlist(pool.imap(pow3, range(%d), chunksize=%d)):\n\t\t%s' \ - ' seconds' % (N, N//8, time.time() - t)) + try: + print(list(pool.imap(f, list(range(10))))) + except ZeroDivisionError: + print('\tGot ZeroDivisionError as expected from list(pool.imap())') + else: + raise AssertionError('expected ZeroDivisionError') - assert A == B == C, (len(A), len(B), len(C)) - print() + it = pool.imap(f, list(range(10))) + for i in range(10): + try: + x = next(it) + except ZeroDivisionError: + if i == 5: + pass + except StopIteration: + break + else: + if i == 5: + raise AssertionError('expected ZeroDivisionError') - L = [None] * 1000000 - print('def noop(x): pass') - print('L = [None] * 1000000') + assert i == 9 + print('\tGot ZeroDivisionError as expected from IMapIterator.next()') + print() - t = time.time() - A = list(map(noop, L)) - 
print('\tmap(noop, L):\n\t\t%s seconds' % \ - (time.time() - t)) + # + # Testing timeouts + # - t = time.time() - B = pool.map(noop, L) - print('\tpool.map(noop, L):\n\t\t%s seconds' % \ - (time.time() - t)) + print('Testing ApplyResult.get() with timeout:', end=' ') + res = pool.apply_async(calculate, TASKS[0]) + while 1: + sys.stdout.flush() + try: + sys.stdout.write('\n\t%s' % res.get(0.02)) + break + except multiprocessing.TimeoutError: + sys.stdout.write('.') + print() + print() - t = time.time() - C = list(pool.imap(noop, L, chunksize=len(L)//8)) - print('\tlist(pool.imap(noop, L, chunksize=%d)):\n\t\t%s seconds' % \ - (len(L)//8, time.time() - t)) - - assert A == B == C, (len(A), len(B), len(C)) - print() - - del A, B, C, L - - # - # Test error handling - # - - print('Testing error handling:') - - try: - print(pool.apply(f, (5,))) - except ZeroDivisionError: - print('\tGot ZeroDivisionError as expected from pool.apply()') - else: - raise AssertionError('expected ZeroDivisionError') - - try: - print(pool.map(f, list(range(10)))) - except ZeroDivisionError: - print('\tGot ZeroDivisionError as expected from pool.map()') - else: - raise AssertionError('expected ZeroDivisionError') - - try: - print(list(pool.imap(f, list(range(10))))) - except ZeroDivisionError: - print('\tGot ZeroDivisionError as expected from list(pool.imap())') - else: - raise AssertionError('expected ZeroDivisionError') - - it = pool.imap(f, list(range(10))) - for i in range(10): - try: - x = next(it) - except ZeroDivisionError: - if i == 5: - pass - except StopIteration: - break - else: - if i == 5: - raise AssertionError('expected ZeroDivisionError') - - assert i == 9 - print('\tGot ZeroDivisionError as expected from IMapIterator.next()') - print() - - # - # Testing timeouts - # - - print('Testing ApplyResult.get() with timeout:', end=' ') - res = pool.apply_async(calculate, TASKS[0]) - while 1: - sys.stdout.flush() - try: - sys.stdout.write('\n\t%s' % res.get(0.02)) - break - except multiprocessing.TimeoutError: - sys.stdout.write('.') - print() - print() - - print('Testing IMapIterator.next() with timeout:', end=' ') - it = pool.imap(calculatestar, TASKS) - while 1: - sys.stdout.flush() - try: - sys.stdout.write('\n\t%s' % it.next(0.02)) - except StopIteration: - break - except multiprocessing.TimeoutError: - sys.stdout.write('.') - print() - print() - - # - # Testing callback - # - - print('Testing callback:') - - A = [] - B = [56, 0, 1, 8, 27, 64, 125, 216, 343, 512, 729] - - r = pool.apply_async(mul, (7, 8), callback=A.append) - r.wait() - - r = pool.map_async(pow3, list(range(10)), callback=A.extend) - r.wait() - - if A == B: - print('\tcallbacks succeeded\n') - else: - print('\t*** callbacks failed\n\t\t%s != %s\n' % (A, B)) - - # - # Check there are no outstanding tasks - # - - assert not pool._cache, 'cache = %r' % pool._cache - - # - # Check close() methods - # - - print('Testing close():') - - for worker in pool._pool: - assert worker.is_alive() - - result = pool.apply_async(time.sleep, [0.5]) - pool.close() - pool.join() - - assert result.get() is None - - for worker in pool._pool: - assert not worker.is_alive() - - print('\tclose() succeeded\n') - - # - # Check terminate() method - # - - print('Testing terminate():') - - pool = multiprocessing.Pool(2) - DELTA = 0.1 - ignore = pool.apply(pow3, [2]) - results = [pool.apply_async(time.sleep, [DELTA]) for i in range(100)] - pool.terminate() - pool.join() - - for worker in pool._pool: - assert not worker.is_alive() - - print('\tterminate() succeeded\n') - - 
# - # Check garbage collection - # - - print('Testing garbage collection:') - - pool = multiprocessing.Pool(2) - DELTA = 0.1 - processes = pool._pool - ignore = pool.apply(pow3, [2]) - results = [pool.apply_async(time.sleep, [DELTA]) for i in range(100)] - - results = pool = None - - time.sleep(DELTA * 2) - - for worker in processes: - assert not worker.is_alive() - - print('\tgarbage collection succeeded\n') + print('Testing IMapIterator.next() with timeout:', end=' ') + it = pool.imap(calculatestar, TASKS) + while 1: + sys.stdout.flush() + try: + sys.stdout.write('\n\t%s' % it.next(0.02)) + except StopIteration: + break + except multiprocessing.TimeoutError: + sys.stdout.write('.') + print() + print() if __name__ == '__main__': multiprocessing.freeze_support() - - assert len(sys.argv) in (1, 2) - - if len(sys.argv) == 1 or sys.argv[1] == 'processes': - print(' Using processes '.center(79, '-')) - elif sys.argv[1] == 'threads': - print(' Using threads '.center(79, '-')) - import multiprocessing.dummy as multiprocessing - else: - print('Usage:\n\t%s [processes | threads]' % sys.argv[0]) - raise SystemExit(2) - test() diff --git a/Doc/includes/mp_synchronize.py b/Doc/includes/mp_synchronize.py deleted file mode 100644 --- a/Doc/includes/mp_synchronize.py +++ /dev/null @@ -1,278 +0,0 @@ -# -# A test file for the `multiprocessing` package -# -# Copyright (c) 2006-2008, R Oudkerk -# All rights reserved. -# - -import time -import sys -import random -from queue import Empty - -import multiprocessing # may get overwritten - - -#### TEST_VALUE - -def value_func(running, mutex): - random.seed() - time.sleep(random.random()*4) - - mutex.acquire() - print('\n\t\t\t' + str(multiprocessing.current_process()) + ' has finished') - running.value -= 1 - mutex.release() - -def test_value(): - TASKS = 10 - running = multiprocessing.Value('i', TASKS) - mutex = multiprocessing.Lock() - - for i in range(TASKS): - p = multiprocessing.Process(target=value_func, args=(running, mutex)) - p.start() - - while running.value > 0: - time.sleep(0.08) - mutex.acquire() - print(running.value, end=' ') - sys.stdout.flush() - mutex.release() - - print() - print('No more running processes') - - -#### TEST_QUEUE - -def queue_func(queue): - for i in range(30): - time.sleep(0.5 * random.random()) - queue.put(i*i) - queue.put('STOP') - -def test_queue(): - q = multiprocessing.Queue() - - p = multiprocessing.Process(target=queue_func, args=(q,)) - p.start() - - o = None - while o != 'STOP': - try: - o = q.get(timeout=0.3) - print(o, end=' ') - sys.stdout.flush() - except Empty: - print('TIMEOUT') - - print() - - -#### TEST_CONDITION - -def condition_func(cond): - cond.acquire() - print('\t' + str(cond)) - time.sleep(2) - print('\tchild is notifying') - print('\t' + str(cond)) - cond.notify() - cond.release() - -def test_condition(): - cond = multiprocessing.Condition() - - p = multiprocessing.Process(target=condition_func, args=(cond,)) - print(cond) - - cond.acquire() - print(cond) - cond.acquire() - print(cond) - - p.start() - - print('main is waiting') - cond.wait() - print('main has woken up') - - print(cond) - cond.release() - print(cond) - cond.release() - - p.join() - print(cond) - - -#### TEST_SEMAPHORE - -def semaphore_func(sema, mutex, running): - sema.acquire() - - mutex.acquire() - running.value += 1 - print(running.value, 'tasks are running') - mutex.release() - - random.seed() - time.sleep(random.random()*2) - - mutex.acquire() - running.value -= 1 - print('%s has finished' % multiprocessing.current_process()) - 
mutex.release() - - sema.release() - -def test_semaphore(): - sema = multiprocessing.Semaphore(3) - mutex = multiprocessing.RLock() - running = multiprocessing.Value('i', 0) - - processes = [ - multiprocessing.Process(target=semaphore_func, - args=(sema, mutex, running)) - for i in range(10) - ] - - for p in processes: - p.start() - - for p in processes: - p.join() - - -#### TEST_JOIN_TIMEOUT - -def join_timeout_func(): - print('\tchild sleeping') - time.sleep(5.5) - print('\n\tchild terminating') - -def test_join_timeout(): - p = multiprocessing.Process(target=join_timeout_func) - p.start() - - print('waiting for process to finish') - - while 1: - p.join(timeout=1) - if not p.is_alive(): - break - print('.', end=' ') - sys.stdout.flush() - - -#### TEST_EVENT - -def event_func(event): - print('\t%r is waiting' % multiprocessing.current_process()) - event.wait() - print('\t%r has woken up' % multiprocessing.current_process()) - -def test_event(): - event = multiprocessing.Event() - - processes = [multiprocessing.Process(target=event_func, args=(event,)) - for i in range(5)] - - for p in processes: - p.start() - - print('main is sleeping') - time.sleep(2) - - print('main is setting event') - event.set() - - for p in processes: - p.join() - - -#### TEST_SHAREDVALUES - -def sharedvalues_func(values, arrays, shared_values, shared_arrays): - for i in range(len(values)): - v = values[i][1] - sv = shared_values[i].value - assert v == sv - - for i in range(len(values)): - a = arrays[i][1] - sa = list(shared_arrays[i][:]) - assert a == sa - - print('Tests passed') - -def test_sharedvalues(): - values = [ - ('i', 10), - ('h', -2), - ('d', 1.25) - ] - arrays = [ - ('i', list(range(100))), - ('d', [0.25 * i for i in range(100)]), - ('H', list(range(1000))) - ] - - shared_values = [multiprocessing.Value(id, v) for id, v in values] - shared_arrays = [multiprocessing.Array(id, a) for id, a in arrays] - - p = multiprocessing.Process( - target=sharedvalues_func, - args=(values, arrays, shared_values, shared_arrays) - ) - p.start() - p.join() - - assert p.exitcode == 0 - - -#### - -def test(namespace=multiprocessing): - global multiprocessing - - multiprocessing = namespace - - for func in [test_value, test_queue, test_condition, - test_semaphore, test_join_timeout, test_event, - test_sharedvalues]: - - print('\n\t######## %s\n' % func.__name__) - func() - - ignore = multiprocessing.active_children() # cleanup any old processes - if hasattr(multiprocessing, '_debug_info'): - info = multiprocessing._debug_info() - if info: - print(info) - raise ValueError('there should be no positive refcounts left') - - -if __name__ == '__main__': - multiprocessing.freeze_support() - - assert len(sys.argv) in (1, 2) - - if len(sys.argv) == 1 or sys.argv[1] == 'processes': - print(' Using processes '.center(79, '-')) - namespace = multiprocessing - elif sys.argv[1] == 'manager': - print(' Using processes and a manager '.center(79, '-')) - namespace = multiprocessing.Manager() - namespace.Process = multiprocessing.Process - namespace.current_process = multiprocessing.current_process - namespace.active_children = multiprocessing.active_children - elif sys.argv[1] == 'threads': - print(' Using threads '.center(79, '-')) - import multiprocessing.dummy as namespace - else: - print('Usage:\n\t%s [processes | manager | threads]' % sys.argv[0]) - raise SystemExit(2) - - test(namespace) diff --git a/Doc/includes/mp_webserver.py b/Doc/includes/mp_webserver.py deleted file mode 100644 --- a/Doc/includes/mp_webserver.py +++ /dev/null @@ 
-1,70 +0,0 @@ -# -# Example where a pool of http servers share a single listening socket -# -# On Windows this module depends on the ability to pickle a socket -# object so that the worker processes can inherit a copy of the server -# object. (We import `multiprocessing.reduction` to enable this pickling.) -# -# Not sure if we should synchronize access to `socket.accept()` method by -# using a process-shared lock -- does not seem to be necessary. -# -# Copyright (c) 2006-2008, R Oudkerk -# All rights reserved. -# - -import os -import sys - -from multiprocessing import Process, current_process, freeze_support -from http.server import HTTPServer -from http.server import SimpleHTTPRequestHandler - -if sys.platform == 'win32': - import multiprocessing.reduction # make sockets pickable/inheritable - - -def note(format, *args): - sys.stderr.write('[%s]\t%s\n' % (current_process().name, format % args)) - - -class RequestHandler(SimpleHTTPRequestHandler): - # we override log_message() to show which process is handling the request - def log_message(self, format, *args): - note(format, *args) - -def serve_forever(server): - note('starting server') - try: - server.serve_forever() - except KeyboardInterrupt: - pass - - -def runpool(address, number_of_processes): - # create a single server object -- children will each inherit a copy - server = HTTPServer(address, RequestHandler) - - # create child processes to act as workers - for i in range(number_of_processes - 1): - Process(target=serve_forever, args=(server,)).start() - - # main process also acts as a worker - serve_forever(server) - - -def test(): - DIR = os.path.join(os.path.dirname(__file__), '..') - ADDRESS = ('localhost', 8000) - NUMBER_OF_PROCESSES = 4 - - print('Serving at http://%s:%d using %d worker processes' % \ - (ADDRESS[0], ADDRESS[1], NUMBER_OF_PROCESSES)) - print('To exit press Ctrl-' + ['C', 'Break'][sys.platform=='win32']) - - os.chdir(DIR) - runpool(ADDRESS, NUMBER_OF_PROCESSES) - - -if __name__ == '__main__': - freeze_support() - test() diff --git a/Doc/includes/mp_workers.py b/Doc/includes/mp_workers.py --- a/Doc/includes/mp_workers.py +++ b/Doc/includes/mp_workers.py @@ -1,16 +1,3 @@ -# -# Simple example which uses a pool of workers to carry out some tasks. -# -# Notice that the results will probably not come out of the output -# queue in the same in the same order as the corresponding tasks were -# put on the input queue. If it is important to get the results back -# in the original order then consider using `Pool.map()` or -# `Pool.imap()` (which will save on the amount of code needed anyway). -# -# Copyright (c) 2006-2008, R Oudkerk -# All rights reserved. -# - import time import random diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -93,11 +93,80 @@ p.start() p.join() -For an explanation of why (on Windows) the ``if __name__ == '__main__'`` part is +For an explanation of why the ``if __name__ == '__main__'`` part is necessary, see :ref:`multiprocessing-programming`. +Start methods +~~~~~~~~~~~~~ + +Depending on the platform, :mod:`multiprocessing` supports three ways +to start a process. These *start methods* are + + *spawn* + The parent process starts a fresh python interpreter process. The + child process will only inherit those resources necessary to run + the process objects :meth:`~Process.run` method. In particular, + unnecessary file descriptors and handles from the parent process + will not be inherited. 
Starting a process using this method is + rather slow compared to using *fork* or *forkserver*. + + Available on Unix and Windows. The default on Windows. + + *fork* + The parent process uses :func:`os.fork` to fork the Python + interpreter. The child process, when it begins, is effectively + identical to the parent process. All resources of the parent are + inherited by the child process. Note that safely forking a + multithreaded process is problematic. + + Available on Unix only. The default on Unix. + + *forkserver* + When the program starts and selects the *forkserver* start method, + a server process is started. From then on, whenever a new process + is need the parent process connects to the server and requests + that it fork a new process. The fork server process is single + threaded so it is safe for it to use :func:`os.fork`. No + unnecessary resources are inherited. + + Available on Unix platforms which support passing file descriptors + over unix pipes. + +Before Python 3.4 *fork* was the only option available on Unix. Also, +prior to Python 3.4, child processes would inherit all the parents +inheritable handles on Windows. + +On Unix using the *spawn* or *forkserver* start methods will also +start a *semaphore tracker* process which tracks the unlinked named +semaphores created by processes of the program. When all processes +have exited the semaphore tracker unlinks any remaining semaphores. +Usually there should be none, but if a process was killed by a signal +there may some "leaked" semaphores. (Unlinking the named semaphores +is a serious matter since the system allows only a limited number, and +they will not be automatically unlinked until the next reboot.) + +To select the a start method you use the :func:`set_start_method` in +the ``if __name__ == '__main__'`` clause of the main module. For +example:: + + import multiprocessing as mp + + def foo(): + print('hello') + + if __name__ == '__main__': + mp.set_start_method('spawn') + p = mp.Process(target=foo) + p.start() + p.join() + +:func:`set_start_method` should not be used more than once in the +program. + + + Exchanging objects between processes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -274,15 +343,31 @@ For example:: from multiprocessing import Pool + from time import sleep def f(x): return x*x if __name__ == '__main__': - with Pool(processes=4) as pool: # start 4 worker processes - result = pool.apply_async(f, [10]) # evaluate "f(10)" asynchronously - print(result.get(timeout=1)) # prints "100" unless your computer is *very* slow - print(pool.map(f, range(10))) # prints "[0, 1, 4,..., 81]" + # start 4 worker processes + with Pool(processes=4) as pool: + + # print "[0, 1, 4,..., 81]" + print(pool.map(f, range(10))) + + # print same numbers in arbitrary order + for i in pool.imap_unordered(f, range(10)): + print(i) + + # evaluate "f(10)" asynchronously + res = pool.apply_async(f, [10]) + print(res.get(timeout=1)) # prints "100" + + # make worker sleep for 10 secs + res = pool.apply_async(sleep, 10) + print(res.get(timeout=1)) # raises multiprocessing.TimeoutError + + # exiting the 'with'-block has stopped the pool Note that the methods of a pool should only ever be used by the process which created it. @@ -763,6 +848,24 @@ If the module is being run normally by the Python interpreter then :func:`freeze_support` has no effect. +.. function:: get_all_start_methods() + + Returns a list of the supported start methods, the first of which + is the default. 
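
As a purely illustrative use of the functions added in this changeset (the pick_start_method helper below is hypothetical, not part of the patch), a program could prefer the forkserver method where it is available and otherwise keep the platform default:

    import multiprocessing as mp

    def pick_start_method():
        # get_all_start_methods() lists the supported methods, default first,
        # e.g. ['fork', 'spawn', 'forkserver'] on Unix or ['spawn'] on Windows.
        methods = mp.get_all_start_methods()
        return 'forkserver' if 'forkserver' in methods else methods[0]

    def worker():
        print('hello from', mp.current_process().name)

    if __name__ == '__main__':
        mp.set_start_method(pick_start_method())   # call at most once
        p = mp.Process(target=worker)
        p.start()
        p.join()
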
The possible start methods are ``'fork'``, + ``'spawn'`` and ``'forkserver'``. On Windows only ``'spawn'`` is + available. On Unix ``'fork'`` and ``'spawn'`` are always + supported, with ``'fork'`` being the default. + + .. versionadded:: 3.4 + +.. function:: get_start_method() + + Return the current start method. This can be ``'fork'``, + ``'spawn'`` or ``'forkserver'``. ``'fork'`` is the default on + Unix, while ``'spawn'`` is the default on Windows. + + .. versionadded:: 3.4 + .. function:: set_executable() Sets the path of the Python interpreter to use when starting a child process. @@ -771,8 +874,21 @@ set_executable(os.path.join(sys.exec_prefix, 'pythonw.exe')) - before they can create child processes. (Windows only) - + before they can create child processes. + + .. versionchanged:: 3.4 + Now supported on Unix when the ``'spawn'`` start method is used. + +.. function:: set_start_method(method) + + Set the method which should be used to start child processes. + *method* can be ``'fork'``, ``'spawn'`` or ``'forkserver'``. + + Note that this should be called at most once, and it should be + protected inside the ``if __name__ == '__main__'`` clause of the + main module. + + .. versionadded:: 3.4 .. note:: @@ -2175,43 +2291,8 @@ [INFO/MainProcess] sending shutdown message to manager [INFO/SyncManager-...] manager exiting with exitcode 0 -In addition to having these two logging functions, the multiprocessing also -exposes two additional logging level attributes. These are :const:`SUBWARNING` -and :const:`SUBDEBUG`. The table below illustrates where theses fit in the -normal level hierarchy. - -+----------------+----------------+ -| Level | Numeric value | -+================+================+ -| ``SUBWARNING`` | 25 | -+----------------+----------------+ -| ``SUBDEBUG`` | 5 | -+----------------+----------------+ - For a full table of logging levels, see the :mod:`logging` module. -These additional logging levels are used primarily for certain debug messages -within the multiprocessing module. Below is the same example as above, except -with :const:`SUBDEBUG` enabled:: - - >>> import multiprocessing, logging - >>> logger = multiprocessing.log_to_stderr() - >>> logger.setLevel(multiprocessing.SUBDEBUG) - >>> logger.warning('doomed') - [WARNING/MainProcess] doomed - >>> m = multiprocessing.Manager() - [INFO/SyncManager-...] child process calling self.run() - [INFO/SyncManager-...] created temp directory /.../pymp-... - [INFO/SyncManager-...] manager serving at '/.../pymp-djGBXN/listener-...' - >>> del m - [SUBDEBUG/MainProcess] finalizer calling ... - [INFO/MainProcess] sending shutdown message to manager - [DEBUG/SyncManager-...] manager received shutdown message - [SUBDEBUG/SyncManager-...] calling ... - [SUBDEBUG/SyncManager-...] calling - [SUBDEBUG/SyncManager-...] finalizer calling ... - [INFO/SyncManager-...] manager exiting with exitcode 0 The :mod:`multiprocessing.dummy` module ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2232,8 +2313,10 @@ :mod:`multiprocessing`. -All platforms -~~~~~~~~~~~~~ +All start methods +~~~~~~~~~~~~~~~~~ + +The following applies to all start methods. Avoid shared state @@ -2266,11 +2349,13 @@ Better to inherit than pickle/unpickle - On Windows many types from :mod:`multiprocessing` need to be picklable so - that child processes can use them. However, one should generally avoid - sending shared objects to other processes using pipes or queues. 
Instead - you should arrange the program so that a process which needs access to a - shared resource created elsewhere can inherit it from an ancestor process. + When using the *spawn* or *forkserver* start methods many types + from :mod:`multiprocessing` need to be picklable so that child + processes can use them. However, one should generally avoid + sending shared objects to other processes using pipes or queues. + Instead you should arrange the program so that a process which + needs access to a shared resource created elsewhere can inherit it + from an ancestor process. Avoid terminating processes @@ -2314,15 +2399,17 @@ Explicitly pass resources to child processes - On Unix a child process can make use of a shared resource created in a - parent process using a global resource. However, it is better to pass the - object as an argument to the constructor for the child process. - - Apart from making the code (potentially) compatible with Windows this also - ensures that as long as the child process is still alive the object will not - be garbage collected in the parent process. This might be important if some - resource is freed when the object is garbage collected in the parent - process. + On Unix using the *fork* start method, a child process can make + use of a shared resource created in a parent process using a + global resource. However, it is better to pass the object as an + argument to the constructor for the child process. + + Apart from making the code (potentially) compatible with Windows + and the other start methods this also ensures that as long as the + child process is still alive the object will not be garbage + collected in the parent process. This might be important if some + resource is freed when the object is garbage collected in the + parent process. So for instance :: @@ -2381,17 +2468,19 @@ For more information, see :issue:`5155`, :issue:`5313` and :issue:`5331` -Windows -~~~~~~~ - -Since Windows lacks :func:`os.fork` it has a few extra restrictions: +The *spawn* and *forkserver* start methods +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +There are a few extra restriction which don't apply to the *fork* +start method. More picklability - Ensure that all arguments to :meth:`Process.__init__` are picklable. This - means, in particular, that bound or unbound methods cannot be used directly - as the ``target`` argument on Windows --- just define a function and use - that instead. + Ensure that all arguments to :meth:`Process.__init__` are + picklable. This means, in particular, that bound or unbound + methods cannot be used directly as the ``target`` (unless you use + the *fork* start method) --- just define a function and use that + instead. Also, if you subclass :class:`Process` then make sure that instances will be picklable when the :meth:`Process.start` method is called. @@ -2411,7 +2500,8 @@ interpreter without causing unintended side effects (such a starting a new process). 
- For example, under Windows running the following module would fail with a + For example, using the *spawn* or *forkserver* start method + running the following module would fail with a :exc:`RuntimeError`:: from multiprocessing import Process @@ -2425,13 +2515,14 @@ Instead one should protect the "entry point" of the program by using ``if __name__ == '__main__':`` as follows:: - from multiprocessing import Process, freeze_support + from multiprocessing import Process, freeze_support, set_start_method def foo(): print('hello') if __name__ == '__main__': freeze_support() + set_start_method('spawn') p = Process(target=foo) p.start() @@ -2462,26 +2553,7 @@ :language: python3 -Synchronization types like locks, conditions and queues: - -.. literalinclude:: ../includes/mp_synchronize.py - :language: python3 - - An example showing how to use queues to feed tasks to a collection of worker processes and collect the results: .. literalinclude:: ../includes/mp_workers.py - - -An example of how a pool of worker processes can each run a -:class:`~http.server.SimpleHTTPRequestHandler` instance while sharing a single -listening socket. - -.. literalinclude:: ../includes/mp_webserver.py - - -Some simple benchmarks comparing :mod:`multiprocessing` with :mod:`threading`: - -.. literalinclude:: ../includes/mp_benchmarks.py - diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -108,6 +108,8 @@ * Single-dispatch generic functions (:pep:`443`) * SHA-3 (Keccak) support for :mod:`hashlib`. * TLSv1.1 and TLSv1.2 support for :mod:`ssl`. +* :mod:`multiprocessing` now has option to avoid using :func:`os.fork` + on Unix (:issue:`8713`). Security improvements: @@ -254,6 +256,17 @@ (Contributed by Valerie Lambert in :issue:`4885`.) +multiprocessing +--------------- + +On Unix two new *start methods* have been added for starting processes +using :mod:`multiprocessing`. These make the mixing of processes with +threads more robust. See :issue:`8713`. + +Also, except when using the old *fork* start method, child processes +will no longer inherit unneeded handles/file descriptors from their parents. + + poplib ------ diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py --- a/Lib/multiprocessing/__init__.py +++ b/Lib/multiprocessing/__init__.py @@ -21,6 +21,8 @@ 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Barrier', 'Queue', 'SimpleQueue', 'JoinableQueue', 'Pool', 'Value', 'Array', 'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING', + 'set_executable', 'set_start_method', 'get_start_method', + 'get_all_start_methods', 'set_forkserver_preload' ] # @@ -30,8 +32,14 @@ import os import sys -from multiprocessing.process import Process, current_process, active_children -from multiprocessing.util import SUBDEBUG, SUBWARNING +from .process import Process, current_process, active_children + +# +# XXX These should not really be documented or public. +# + +SUBDEBUG = 5 +SUBWARNING = 25 # # Alias for main module -- will be reset by bootstrapping child processes @@ -56,8 +64,6 @@ class AuthenticationError(ProcessError): pass -import _multiprocessing - # # Definitions not depending on native semaphores # @@ -69,7 +75,7 @@ The managers methods such as `Lock()`, `Condition()` and `Queue()` can be used to create shared objects. 
''' - from multiprocessing.managers import SyncManager + from .managers import SyncManager m = SyncManager() m.start() return m @@ -78,7 +84,7 @@ ''' Returns two connection object connected by a pipe ''' - from multiprocessing.connection import Pipe + from .connection import Pipe return Pipe(duplex) def cpu_count(): @@ -97,21 +103,21 @@ If so then run code specified by commandline and exit. ''' if sys.platform == 'win32' and getattr(sys, 'frozen', False): - from multiprocessing.forking import freeze_support + from .spawn import freeze_support freeze_support() def get_logger(): ''' Return package logger -- if it does not already exist then it is created ''' - from multiprocessing.util import get_logger + from .util import get_logger return get_logger() def log_to_stderr(level=None): ''' Turn on logging and add a handler which prints to stderr ''' - from multiprocessing.util import log_to_stderr + from .util import log_to_stderr return log_to_stderr(level) def allow_connection_pickling(): @@ -120,7 +126,7 @@ ''' # This is undocumented. In previous versions of multiprocessing # its only effect was to make socket objects inheritable on Windows. - import multiprocessing.connection + from . import connection # # Definitions depending on native semaphores @@ -130,120 +136,151 @@ ''' Returns a non-recursive lock object ''' - from multiprocessing.synchronize import Lock + from .synchronize import Lock return Lock() def RLock(): ''' Returns a recursive lock object ''' - from multiprocessing.synchronize import RLock + from .synchronize import RLock return RLock() def Condition(lock=None): ''' Returns a condition object ''' - from multiprocessing.synchronize import Condition + from .synchronize import Condition return Condition(lock) def Semaphore(value=1): ''' Returns a semaphore object ''' - from multiprocessing.synchronize import Semaphore + from .synchronize import Semaphore return Semaphore(value) def BoundedSemaphore(value=1): ''' Returns a bounded semaphore object ''' - from multiprocessing.synchronize import BoundedSemaphore + from .synchronize import BoundedSemaphore return BoundedSemaphore(value) def Event(): ''' Returns an event object ''' - from multiprocessing.synchronize import Event + from .synchronize import Event return Event() def Barrier(parties, action=None, timeout=None): ''' Returns a barrier object ''' - from multiprocessing.synchronize import Barrier + from .synchronize import Barrier return Barrier(parties, action, timeout) def Queue(maxsize=0): ''' Returns a queue object ''' - from multiprocessing.queues import Queue + from .queues import Queue return Queue(maxsize) def JoinableQueue(maxsize=0): ''' Returns a queue object ''' - from multiprocessing.queues import JoinableQueue + from .queues import JoinableQueue return JoinableQueue(maxsize) def SimpleQueue(): ''' Returns a queue object ''' - from multiprocessing.queues import SimpleQueue + from .queues import SimpleQueue return SimpleQueue() def Pool(processes=None, initializer=None, initargs=(), maxtasksperchild=None): ''' Returns a process pool object ''' - from multiprocessing.pool import Pool + from .pool import Pool return Pool(processes, initializer, initargs, maxtasksperchild) def RawValue(typecode_or_type, *args): ''' Returns a shared object ''' - from multiprocessing.sharedctypes import RawValue + from .sharedctypes import RawValue return RawValue(typecode_or_type, *args) def RawArray(typecode_or_type, size_or_initializer): ''' Returns a shared array ''' - from multiprocessing.sharedctypes import RawArray + from 
.sharedctypes import RawArray return RawArray(typecode_or_type, size_or_initializer) def Value(typecode_or_type, *args, lock=True): ''' Returns a synchronized shared object ''' - from multiprocessing.sharedctypes import Value + from .sharedctypes import Value return Value(typecode_or_type, *args, lock=lock) def Array(typecode_or_type, size_or_initializer, *, lock=True): ''' Returns a synchronized shared array ''' - from multiprocessing.sharedctypes import Array + from .sharedctypes import Array return Array(typecode_or_type, size_or_initializer, lock=lock) # # # -if sys.platform == 'win32': +def set_executable(executable): + ''' + Sets the path to a python.exe or pythonw.exe binary used to run + child processes instead of sys.executable when using the 'spawn' + start method. Useful for people embedding Python. + ''' + from .spawn import set_executable + set_executable(executable) - def set_executable(executable): - ''' - Sets the path to a python.exe or pythonw.exe binary used to run - child processes on Windows instead of sys.executable. - Useful for people embedding Python. - ''' - from multiprocessing.forking import set_executable - set_executable(executable) +def set_start_method(method): + ''' + Set method for starting processes: 'fork', 'spawn' or 'forkserver'. + ''' + from .popen import set_start_method + set_start_method(method) - __all__ += ['set_executable'] +def get_start_method(): + ''' + Get method for starting processes: 'fork', 'spawn' or 'forkserver'. + ''' + from .popen import get_start_method + return get_start_method() + +def get_all_start_methods(): + ''' + Get list of availables start methods, default first. + ''' + from .popen import get_all_start_methods + return get_all_start_methods() + +def set_forkserver_preload(module_names): + ''' + Set list of module names to try to load in the forkserver process + when it is started. Properly chosen this can significantly reduce + the cost of starting a new process using the forkserver method. + The default list is ['__main__']. + ''' + try: + from .forkserver import set_forkserver_preload + except ImportError: + pass + else: + set_forkserver_preload(module_names) diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -21,9 +21,13 @@ import itertools import _multiprocessing -from multiprocessing import current_process, AuthenticationError, BufferTooShort -from multiprocessing.util import get_temp_dir, Finalize, sub_debug, debug -from multiprocessing.forking import ForkingPickler + +from . import reduction +from . import util + +from . 
import AuthenticationError, BufferTooShort +from .reduction import ForkingPickler + try: import _winapi from _winapi import WAIT_OBJECT_0, WAIT_TIMEOUT, INFINITE @@ -71,7 +75,7 @@ if family == 'AF_INET': return ('localhost', 0) elif family == 'AF_UNIX': - return tempfile.mktemp(prefix='listener-', dir=get_temp_dir()) + return tempfile.mktemp(prefix='listener-', dir=util.get_temp_dir()) elif family == 'AF_PIPE': return tempfile.mktemp(prefix=r'\\.\pipe\pyc-%d-%d-' % (os.getpid(), next(_mmap_counter))) @@ -505,7 +509,7 @@ c1 = Connection(s1.detach()) c2 = Connection(s2.detach()) else: - fd1, fd2 = os.pipe() + fd1, fd2 = util.pipe() c1 = Connection(fd1, writable=False) c2 = Connection(fd2, readable=False) @@ -577,7 +581,7 @@ self._last_accepted = None if family == 'AF_UNIX': - self._unlink = Finalize( + self._unlink = util.Finalize( self, os.unlink, args=(address,), exitpriority=0 ) else: @@ -625,8 +629,8 @@ self._handle_queue = [self._new_handle(first=True)] self._last_accepted = None - sub_debug('listener created with address=%r', self._address) - self.close = Finalize( + util.sub_debug('listener created with address=%r', self._address) + self.close = util.Finalize( self, PipeListener._finalize_pipe_listener, args=(self._handle_queue, self._address), exitpriority=0 ) @@ -668,7 +672,7 @@ @staticmethod def _finalize_pipe_listener(queue, address): - sub_debug('closing listener with address=%r', address) + util.sub_debug('closing listener with address=%r', address) for handle in queue: _winapi.CloseHandle(handle) @@ -919,15 +923,32 @@ # if sys.platform == 'win32': - from . import reduction - ForkingPickler.register(socket.socket, reduction.reduce_socket) - ForkingPickler.register(Connection, reduction.reduce_connection) - ForkingPickler.register(PipeConnection, reduction.reduce_pipe_connection) + def reduce_connection(conn): + handle = conn.fileno() + with socket.fromfd(handle, socket.AF_INET, socket.SOCK_STREAM) as s: + from . import resource_sharer + ds = resource_sharer.DupSocket(s) + return rebuild_connection, (ds, conn.readable, conn.writable) + def rebuild_connection(ds, readable, writable): + sock = ds.detach() + return Connection(sock.detach(), readable, writable) + reduction.register(Connection, reduce_connection) + + def reduce_pipe_connection(conn): + access = ((_winapi.FILE_GENERIC_READ if conn.readable else 0) | + (_winapi.FILE_GENERIC_WRITE if conn.writable else 0)) + dh = reduction.DupHandle(conn.fileno(), access) + return rebuild_pipe_connection, (dh, conn.readable, conn.writable) + def rebuild_pipe_connection(dh, readable, writable): + handle = dh.detach() + return PipeConnection(handle, readable, writable) + reduction.register(PipeConnection, reduce_pipe_connection) + else: - try: - from . 
import reduction - except ImportError: - pass - else: - ForkingPickler.register(socket.socket, reduction.reduce_socket) - ForkingPickler.register(Connection, reduction.reduce_connection) + def reduce_connection(conn): + df = reduction.DupFd(conn.fileno()) + return rebuild_connection, (df, conn.readable, conn.writable) + def rebuild_connection(df, readable, writable): + fd = df.detach() + return Connection(fd, readable, writable) + reduction.register(Connection, reduce_connection) diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py --- a/Lib/multiprocessing/dummy/__init__.py +++ b/Lib/multiprocessing/dummy/__init__.py @@ -22,7 +22,7 @@ import weakref import array -from multiprocessing.dummy.connection import Pipe +from .connection import Pipe from threading import Lock, RLock, Semaphore, BoundedSemaphore from threading import Event, Condition, Barrier from queue import Queue @@ -113,7 +113,7 @@ pass def Pool(processes=None, initializer=None, initargs=()): - from multiprocessing.pool import ThreadPool + from ..pool import ThreadPool return ThreadPool(processes, initializer, initargs) JoinableQueue = Queue diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py deleted file mode 100644 --- a/Lib/multiprocessing/forking.py +++ /dev/null @@ -1,477 +0,0 @@ -# -# Module for starting a process object using os.fork() or CreateProcess() -# -# multiprocessing/forking.py -# -# Copyright (c) 2006-2008, R Oudkerk -# Licensed to PSF under a Contributor Agreement. -# - -import io -import os -import pickle -import sys -import signal -import errno - -from multiprocessing import util, process - -__all__ = ['Popen', 'assert_spawning', 'duplicate', 'close', 'ForkingPickler'] - -# -# Check that the current thread is spawning a child process -# - -def assert_spawning(self): - if not Popen.thread_is_spawning(): - raise RuntimeError( - '%s objects should only be shared between processes' - ' through inheritance' % type(self).__name__ - ) - -# -# Try making some callable types picklable -# - -from pickle import Pickler -from copyreg import dispatch_table - -class ForkingPickler(Pickler): - _extra_reducers = {} - def __init__(self, *args): - Pickler.__init__(self, *args) - self.dispatch_table = dispatch_table.copy() - self.dispatch_table.update(self._extra_reducers) - @classmethod - def register(cls, type, reduce): - cls._extra_reducers[type] = reduce - - @staticmethod - def dumps(obj): - buf = io.BytesIO() - ForkingPickler(buf, pickle.HIGHEST_PROTOCOL).dump(obj) - return buf.getbuffer() - - loads = pickle.loads - - -def _reduce_method(m): - if m.__self__ is None: - return getattr, (m.__class__, m.__func__.__name__) - else: - return getattr, (m.__self__, m.__func__.__name__) -class _C: - def f(self): - pass -ForkingPickler.register(type(_C().f), _reduce_method) - - -def _reduce_method_descriptor(m): - return getattr, (m.__objclass__, m.__name__) -ForkingPickler.register(type(list.append), _reduce_method_descriptor) -ForkingPickler.register(type(int.__add__), _reduce_method_descriptor) - -try: - from functools import partial -except ImportError: - pass -else: - def _reduce_partial(p): - return _rebuild_partial, (p.func, p.args, p.keywords or {}) - def _rebuild_partial(func, args, keywords): - return partial(func, *args, **keywords) - ForkingPickler.register(partial, _reduce_partial) - -# -# Unix -# - -if sys.platform != 'win32': - duplicate = os.dup - close = os.close - - # - # We define a Popen class similar to the one from subprocess, but - # whose 
constructor takes a process object as its argument. - # - - class Popen(object): - - def __init__(self, process_obj): - sys.stdout.flush() - sys.stderr.flush() - self.returncode = None - - r, w = os.pipe() - self.sentinel = r - - self.pid = os.fork() - if self.pid == 0: - os.close(r) - if 'random' in sys.modules: - import random - random.seed() - code = process_obj._bootstrap() - os._exit(code) - - # `w` will be closed when the child exits, at which point `r` - # will become ready for reading (using e.g. select()). - os.close(w) - util.Finalize(self, os.close, (r,)) - - def poll(self, flag=os.WNOHANG): - if self.returncode is None: - while True: - try: - pid, sts = os.waitpid(self.pid, flag) - except OSError as e: - if e.errno == errno.EINTR: - continue - # Child process not yet created. See #1731717 - # e.errno == errno.ECHILD == 10 - return None - else: - break - if pid == self.pid: - if os.WIFSIGNALED(sts): - self.returncode = -os.WTERMSIG(sts) - else: - assert os.WIFEXITED(sts) - self.returncode = os.WEXITSTATUS(sts) - return self.returncode - - def wait(self, timeout=None): - if self.returncode is None: - if timeout is not None: - from .connection import wait - if not wait([self.sentinel], timeout): - return None - # This shouldn't block if wait() returned successfully. - return self.poll(os.WNOHANG if timeout == 0.0 else 0) - return self.returncode - - def terminate(self): - if self.returncode is None: - try: - os.kill(self.pid, signal.SIGTERM) - except OSError: - if self.wait(timeout=0.1) is None: - raise - - @staticmethod - def thread_is_spawning(): - return False - -# -# Windows -# - -else: - import _thread - import msvcrt - import _winapi - - from pickle import load, HIGHEST_PROTOCOL - - def dump(obj, file, protocol=None): - ForkingPickler(file, protocol).dump(obj) - - # - # - # - - TERMINATE = 0x10000 - WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) - WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") - - close = _winapi.CloseHandle - - # - # _python_exe is the assumed path to the python executable. - # People embedding Python want to modify it. - # - - if WINSERVICE: - _python_exe = os.path.join(sys.exec_prefix, 'python.exe') - else: - _python_exe = sys.executable - - def set_executable(exe): - global _python_exe - _python_exe = exe - - # - # - # - - def duplicate(handle, target_process=None, inheritable=False): - if target_process is None: - target_process = _winapi.GetCurrentProcess() - return _winapi.DuplicateHandle( - _winapi.GetCurrentProcess(), handle, target_process, - 0, inheritable, _winapi.DUPLICATE_SAME_ACCESS - ) - - # - # We define a Popen class similar to the one from subprocess, but - # whose constructor takes a process object as its argument. 
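The sentinel mechanism mentioned in the comment above can be sketched in isolation (Unix-only; the variable names are illustrative and not part of the patch): the parent keeps the read end of a pipe whose write end is held only by the child, so the read end reports EOF, and hence becomes selectable, as soon as the child exits.

import os
import select

r, w = os.pipe()
pid = os.fork()
if pid == 0:                    # child: keep only the write end
    os.close(r)
    os._exit(0)                 # exiting closes w on the child side
os.close(w)                     # parent: drop its own copy of the write end
select.select([r], [], [])      # returns once the child has exited
os.close(r)
os.waitpid(pid, 0)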
- # - - class Popen(object): - ''' - Start a subprocess to run the code of a process object - ''' - _tls = _thread._local() - - def __init__(self, process_obj): - cmd = ' '.join('"%s"' % x for x in get_command_line()) - prep_data = get_preparation_data(process_obj._name) - - # create pipe for communication with child - rfd, wfd = os.pipe() - - # get handle for read end of the pipe and make it inheritable - rhandle = duplicate(msvcrt.get_osfhandle(rfd), inheritable=True) - os.close(rfd) - - with open(wfd, 'wb', closefd=True) as to_child: - # start process - try: - hp, ht, pid, tid = _winapi.CreateProcess( - _python_exe, cmd + (' %s' % rhandle), - None, None, 1, 0, None, None, None - ) - _winapi.CloseHandle(ht) - finally: - close(rhandle) - - # set attributes of self - self.pid = pid - self.returncode = None - self._handle = hp - self.sentinel = int(hp) - util.Finalize(self, _winapi.CloseHandle, (self.sentinel,)) - - # send information to child - Popen._tls.process_handle = int(hp) - try: - dump(prep_data, to_child, HIGHEST_PROTOCOL) - dump(process_obj, to_child, HIGHEST_PROTOCOL) - finally: - del Popen._tls.process_handle - - @staticmethod - def thread_is_spawning(): - return getattr(Popen._tls, 'process_handle', None) is not None - - @staticmethod - def duplicate_for_child(handle): - return duplicate(handle, Popen._tls.process_handle) - - def wait(self, timeout=None): - if self.returncode is None: - if timeout is None: - msecs = _winapi.INFINITE - else: - msecs = max(0, int(timeout * 1000 + 0.5)) - - res = _winapi.WaitForSingleObject(int(self._handle), msecs) - if res == _winapi.WAIT_OBJECT_0: - code = _winapi.GetExitCodeProcess(self._handle) - if code == TERMINATE: - code = -signal.SIGTERM - self.returncode = code - - return self.returncode - - def poll(self): - return self.wait(timeout=0) - - def terminate(self): - if self.returncode is None: - try: - _winapi.TerminateProcess(int(self._handle), TERMINATE) - except OSError: - if self.wait(timeout=1.0) is None: - raise - - # - # - # - - def is_forking(argv): - ''' - Return whether commandline indicates we are forking - ''' - if len(argv) >= 2 and argv[1] == '--multiprocessing-fork': - assert len(argv) == 3 - return True - else: - return False - - - def freeze_support(): - ''' - Run code for process object if this in not the main process - ''' - if is_forking(sys.argv): - main() - sys.exit() - - - def get_command_line(): - ''' - Returns prefix of command line used for spawning a child process - ''' - if getattr(process.current_process(), '_inheriting', False): - raise RuntimeError(''' - Attempt to start a new process before the current process - has finished its bootstrapping phase. - - This probably means that you are on Windows and you have - forgotten to use the proper idiom in the main module: - - if __name__ == '__main__': - freeze_support() - ... 
- - The "freeze_support()" line can be omitted if the program - is not going to be frozen to produce a Windows executable.''') - - if getattr(sys, 'frozen', False): - return [sys.executable, '--multiprocessing-fork'] - else: - prog = 'from multiprocessing.forking import main; main()' - opts = util._args_from_interpreter_flags() - return [_python_exe] + opts + ['-c', prog, '--multiprocessing-fork'] - - - def main(): - ''' - Run code specifed by data received over pipe - ''' - assert is_forking(sys.argv) - - handle = int(sys.argv[-1]) - fd = msvcrt.open_osfhandle(handle, os.O_RDONLY) - from_parent = os.fdopen(fd, 'rb') - - process.current_process()._inheriting = True - preparation_data = load(from_parent) - prepare(preparation_data) - self = load(from_parent) - process.current_process()._inheriting = False - - from_parent.close() - - exitcode = self._bootstrap() - sys.exit(exitcode) - - - def get_preparation_data(name): - ''' - Return info about parent needed by child to unpickle process object - ''' - from .util import _logger, _log_to_stderr - - d = dict( - name=name, - sys_path=sys.path, - sys_argv=sys.argv, - log_to_stderr=_log_to_stderr, - orig_dir=process.ORIGINAL_DIR, - authkey=process.current_process().authkey, - ) - - if _logger is not None: - d['log_level'] = _logger.getEffectiveLevel() - - if not WINEXE and not WINSERVICE: - main_path = getattr(sys.modules['__main__'], '__file__', None) - if not main_path and sys.argv[0] not in ('', '-c'): - main_path = sys.argv[0] - if main_path is not None: - if not os.path.isabs(main_path) and \ - process.ORIGINAL_DIR is not None: - main_path = os.path.join(process.ORIGINAL_DIR, main_path) - d['main_path'] = os.path.normpath(main_path) - - return d - -# -# Prepare current process -# - -old_main_modules = [] - -def prepare(data): - ''' - Try to get current process ready to unpickle process object - ''' - old_main_modules.append(sys.modules['__main__']) - - if 'name' in data: - process.current_process().name = data['name'] - - if 'authkey' in data: - process.current_process()._authkey = data['authkey'] - - if 'log_to_stderr' in data and data['log_to_stderr']: - util.log_to_stderr() - - if 'log_level' in data: - util.get_logger().setLevel(data['log_level']) - - if 'sys_path' in data: - sys.path = data['sys_path'] - - if 'sys_argv' in data: - sys.argv = data['sys_argv'] - - if 'dir' in data: - os.chdir(data['dir']) - - if 'orig_dir' in data: - process.ORIGINAL_DIR = data['orig_dir'] - - if 'main_path' in data: - # XXX (ncoghlan): The following code makes several bogus - # assumptions regarding the relationship between __file__ - # and a module's real name. 
See PEP 302 and issue #10845 - main_path = data['main_path'] - main_name = os.path.splitext(os.path.basename(main_path))[0] - if main_name == '__init__': - main_name = os.path.basename(os.path.dirname(main_path)) - - if main_name == '__main__': - main_module = sys.modules['__main__'] - main_module.__file__ = main_path - elif main_name != 'ipython': - # Main modules not actually called __main__.py may - # contain additional code that should still be executed - import importlib - import types - - if main_path is None: - dirs = None - elif os.path.basename(main_path).startswith('__init__.py'): - dirs = [os.path.dirname(os.path.dirname(main_path))] - else: - dirs = [os.path.dirname(main_path)] - - assert main_name not in sys.modules, main_name - sys.modules.pop('__mp_main__', None) - # We should not try to load __main__ - # since that would execute 'if __name__ == "__main__"' - # clauses, potentially causing a psuedo fork bomb. - loader = importlib.find_loader(main_name, path=dirs) - main_module = types.ModuleType(main_name) - try: - loader.init_module_attrs(main_module) - except AttributeError: # init_module_attrs is optional - pass - main_module.__name__ = '__mp_main__' - code = loader.get_code(main_name) - exec(code, main_module.__dict__) - - sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/forkserver.py @@ -0,0 +1,238 @@ +import errno +import os +import select +import signal +import socket +import struct +import sys +import threading + +from . import connection +from . import process +from . import reduction +from . import spawn +from . import util + +__all__ = ['ensure_running', 'get_inherited_fds', 'connect_to_new_process', + 'set_forkserver_preload'] + +# +# +# + +MAXFDS_TO_SEND = 256 +UNSIGNED_STRUCT = struct.Struct('Q') # large enough for pid_t + +_inherited_fds = None +_lock = threading.Lock() +_preload_modules = ['__main__'] + + +# +# Public function +# + +def set_forkserver_preload(modules_names): + '''Set list of module names to try to load in forkserver process.''' + global _preload_modules + _preload_modules = modules_names + + +def get_inherited_fds(): + '''Return list of fds inherited from parent process. + + This returns None if the current process was not started by fork server. + ''' + return _inherited_fds + + +def connect_to_new_process(fds): + '''Request forkserver to create a child process. + + Returns a pair of fds (status_r, data_w). The calling process can read + the child process's pid and (eventually) its returncode from status_r. + The calling process should write to data_w the pickled preparation and + process data. + ''' + if len(fds) + 3 >= MAXFDS_TO_SEND: + raise ValueError('too many fds') + address, alive_w = process.current_process()._config['forkserver_info'] + with socket.socket(socket.AF_UNIX) as client: + client.connect(address) + parent_r, child_w = util.pipe() + child_r, parent_w = util.pipe() + allfds = [child_r, child_w, alive_w] + allfds += fds + try: + reduction.sendfds(client, allfds) + return parent_r, parent_w + except: + os.close(parent_r) + os.close(parent_w) + raise + finally: + os.close(child_r) + os.close(child_w) + + +def ensure_running(): + '''Make sure that a fork server is running. + + This can be called from any process. Note that usually a child + process will just reuse the forkserver started by its parent, so + ensure_running() will do nothing. 
+ ''' + with _lock: + config = process.current_process()._config + if config.get('forkserver_info') is not None: + return + + assert all(type(mod) is str for mod in _preload_modules) + semaphore_tracker_fd = config['semaphore_tracker_fd'] + cmd = ('from multiprocessing.forkserver import main; ' + + 'main(%d, %d, %r, **%r)') + + if _preload_modules: + desired_keys = {'main_path', 'sys_path'} + data = spawn.get_preparation_data('ignore') + data = dict((x,y) for (x,y) in data.items() if x in desired_keys) + else: + data = {} + + with socket.socket(socket.AF_UNIX) as listener: + address = connection.arbitrary_address('AF_UNIX') + listener.bind(address) + os.chmod(address, 0o600) + listener.listen(100) + + # all client processes own the write end of the "alive" pipe; + # when they all terminate the read end becomes ready. + alive_r, alive_w = os.pipe() + config['forkserver_info'] = (address, alive_w) + fds_to_pass = [listener.fileno(), alive_r, semaphore_tracker_fd] + cmd %= (listener.fileno(), alive_r, _preload_modules, data) + exe = spawn.get_executable() + args = [exe] + util._args_from_interpreter_flags() + ['-c', cmd] + pid = util.spawnv_passfds(exe, args, fds_to_pass) + + +def main(listener_fd, alive_r, preload, main_path=None, sys_path=None): + '''Run forkserver.''' + if preload: + if '__main__' in preload and main_path is not None: + process.current_process()._inheriting = True + try: + spawn.import_main_path(main_path) + finally: + del process.current_process()._inheriting + for modname in preload: + try: + __import__(modname) + except ImportError: + pass + + # close sys.stdin + if sys.stdin is not None: + try: + sys.stdin.close() + sys.stdin = open(os.devnull) + except (OSError, ValueError): + pass + + # ignoring SIGCHLD means no need to reap zombie processes + handler = signal.signal(signal.SIGCHLD, signal.SIG_IGN) + with socket.socket(socket.AF_UNIX, fileno=listener_fd) as listener: + readers = [listener, alive_r] + + while True: + try: + rfds, wfds, xfds = select.select(readers, [], []) + + if alive_r in rfds: + # EOF because no more client processes left + assert os.read(alive_r, 1) == b'' + raise SystemExit + + assert listener in rfds + with listener.accept()[0] as s: + code = 1 + if os.fork() == 0: + try: + _serve_one(s, listener, alive_r, handler) + except Exception: + sys.excepthook(*sys.exc_info()) + sys.stderr.flush() + finally: + os._exit(code) + + except InterruptedError: + pass + except OSError as e: + if e.errno != errno.ECONNABORTED: + raise + +# +# Code to bootstrap new process +# + +def _serve_one(s, listener, alive_r, handler): + global _inherited_fds + + # close unnecessary stuff and reset SIGCHLD handler + listener.close() + os.close(alive_r) + signal.signal(signal.SIGCHLD, handler) + + # receive fds from parent process + fds = reduction.recvfds(s, MAXFDS_TO_SEND + 1) + s.close() + assert len(fds) <= MAXFDS_TO_SEND + child_r, child_w, alive_w, *_inherited_fds = fds + + # send pid to client processes + write_unsigned(child_w, os.getpid()) + + # reseed random number generator + if 'random' in sys.modules: + import random + random.seed() + + # run process object received over pipe + code = spawn._main(child_r) + + # write the exit code to the pipe + write_unsigned(child_w, code) + +# +# Read and write unsigned numbers +# + +def read_unsigned(fd): + data = b'' + length = UNSIGNED_STRUCT.size + while len(data) < length: + while True: + try: + s = os.read(fd, length - len(data)) + except InterruptedError: + pass + else: + break + if not s: + raise EOFError('unexpected 
EOF') + data += s + return UNSIGNED_STRUCT.unpack(data)[0] + +def write_unsigned(fd, n): + msg = UNSIGNED_STRUCT.pack(n) + while msg: + while True: + try: + nbytes = os.write(fd, msg) + except InterruptedError: + pass + else: + break + if nbytes == 0: + raise RuntimeError('should not get here') + msg = msg[nbytes:] diff --git a/Lib/multiprocessing/heap.py b/Lib/multiprocessing/heap.py --- a/Lib/multiprocessing/heap.py +++ b/Lib/multiprocessing/heap.py @@ -8,15 +8,17 @@ # import bisect +import itertools import mmap import os import sys +import tempfile import threading -import itertools +import _multiprocessing -import _multiprocessing -from multiprocessing.util import Finalize, info -from multiprocessing.forking import assert_spawning +from . import popen +from . import reduction +from . import util __all__ = ['BufferWrapper'] @@ -30,17 +32,25 @@ class Arena(object): - _counter = itertools.count() + _rand = tempfile._RandomNameSequence() def __init__(self, size): self.size = size - self.name = 'pym-%d-%d' % (os.getpid(), next(Arena._counter)) - self.buffer = mmap.mmap(-1, self.size, tagname=self.name) - assert _winapi.GetLastError() == 0, 'tagname already in use' + for i in range(100): + name = 'pym-%d-%s' % (os.getpid(), next(self._rand)) + buf = mmap.mmap(-1, size, tagname=name) + if _winapi.GetLastError() == 0: + break + # We have reopened a preexisting mmap. + buf.close() + else: + raise FileExistsError('Cannot find name for new mmap') + self.name = name + self.buffer = buf self._state = (self.size, self.name) def __getstate__(self): - assert_spawning(self) + popen.assert_spawning(self) return self._state def __setstate__(self, state): @@ -52,10 +62,28 @@ class Arena(object): - def __init__(self, size): - self.buffer = mmap.mmap(-1, size) + def __init__(self, size, fd=-1): self.size = size - self.name = None + self.fd = fd + if fd == -1: + self.fd, name = tempfile.mkstemp( + prefix='pym-%d-'%os.getpid(), dir=util.get_temp_dir()) + os.unlink(name) + util.Finalize(self, os.close, (self.fd,)) + with open(self.fd, 'wb', closefd=False) as f: + f.write(b'\0'*size) + self.buffer = mmap.mmap(self.fd, self.size) + + def reduce_arena(a): + if a.fd == -1: + raise ValueError('Arena is unpicklable because ' + 'forking was enabled when it was created') + return rebuild_arena, (a.size, reduction.DupFd(a.fd)) + + def rebuild_arena(size, dupfd): + return Arena(size, dupfd.detach()) + + reduction.register(Arena, reduce_arena) # # Class allowing allocation of chunks of memory from arenas @@ -90,7 +118,7 @@ if i == len(self._lengths): length = self._roundup(max(self._size, size), mmap.PAGESIZE) self._size *= 2 - info('allocating a new mmap of length %d', length) + util.info('allocating a new mmap of length %d', length) arena = Arena(length) self._arenas.append(arena) return (arena, 0, length) @@ -216,7 +244,7 @@ assert 0 <= size < sys.maxsize block = BufferWrapper._heap.malloc(size) self._state = (block, size) - Finalize(self, BufferWrapper._heap.free, args=(block,)) + util.Finalize(self, BufferWrapper._heap.free, args=(block,)) def create_memoryview(self): (arena, start, stop), size = self._state diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -19,11 +19,15 @@ import array import queue +from time import time as _time from traceback import format_exc -from multiprocessing import Process, current_process, active_children, Pool, util, connection -from multiprocessing.process import 
AuthenticationString -from multiprocessing.forking import Popen, ForkingPickler -from time import time as _time + +from . import connection +from . import pool +from . import process +from . import popen +from . import reduction +from . import util # # Register some things for pickling @@ -31,16 +35,14 @@ def reduce_array(a): return array.array, (a.typecode, a.tobytes()) -ForkingPickler.register(array.array, reduce_array) +reduction.register(array.array, reduce_array) view_types = [type(getattr({}, name)()) for name in ('items','keys','values')] if view_types[0] is not list: # only needed in Py3.0 def rebuild_as_list(obj): return list, (list(obj),) for view_type in view_types: - ForkingPickler.register(view_type, rebuild_as_list) - import copyreg - copyreg.pickle(view_type, rebuild_as_list) + reduction.register(view_type, rebuild_as_list) # # Type for identifying shared objects @@ -130,7 +132,7 @@ def __init__(self, registry, address, authkey, serializer): assert isinstance(authkey, bytes) self.registry = registry - self.authkey = AuthenticationString(authkey) + self.authkey = process.AuthenticationString(authkey) Listener, Client = listener_client[serializer] # do authentication later @@ -146,7 +148,7 @@ Run the server forever ''' self.stop_event = threading.Event() - current_process()._manager_server = self + process.current_process()._manager_server = self try: accepter = threading.Thread(target=self.accepter) accepter.daemon = True @@ -438,9 +440,9 @@ def __init__(self, address=None, authkey=None, serializer='pickle'): if authkey is None: - authkey = current_process().authkey + authkey = process.current_process().authkey self._address = address # XXX not final address if eg ('', 0) - self._authkey = AuthenticationString(authkey) + self._authkey = process.AuthenticationString(authkey) self._state = State() self._state.value = State.INITIAL self._serializer = serializer @@ -476,7 +478,7 @@ reader, writer = connection.Pipe(duplex=False) # spawn process which runs a server - self._process = Process( + self._process = process.Process( target=type(self)._run_server, args=(self._registry, self._address, self._authkey, self._serializer, writer, initializer, initargs), @@ -691,11 +693,11 @@ self._Client = listener_client[serializer][1] if authkey is not None: - self._authkey = AuthenticationString(authkey) + self._authkey = process.AuthenticationString(authkey) elif self._manager is not None: self._authkey = self._manager._authkey else: - self._authkey = current_process().authkey + self._authkey = process.current_process().authkey if incref: self._incref() @@ -704,7 +706,7 @@ def _connect(self): util.debug('making connection to manager') - name = current_process().name + name = process.current_process().name if threading.current_thread().name != 'MainThread': name += '|' + threading.current_thread().name conn = self._Client(self._token.address, authkey=self._authkey) @@ -798,7 +800,7 @@ def __reduce__(self): kwds = {} - if Popen.thread_is_spawning(): + if popen.get_spawning_popen() is not None: kwds['authkey'] = self._authkey if getattr(self, '_isauto', False): @@ -835,14 +837,14 @@ If possible the shared object is returned, or otherwise a proxy for it. 
''' - server = getattr(current_process(), '_manager_server', None) + server = getattr(process.current_process(), '_manager_server', None) if server and server.address == token.address: return server.id_to_obj[token.id][0] else: incref = ( kwds.pop('incref', True) and - not getattr(current_process(), '_inheriting', False) + not getattr(process.current_process(), '_inheriting', False) ) return func(token, serializer, incref=incref, **kwds) @@ -889,7 +891,7 @@ if authkey is None and manager is not None: authkey = manager._authkey if authkey is None: - authkey = current_process().authkey + authkey = process.current_process().authkey ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed) proxy = ProxyType(token, serializer, manager=manager, authkey=authkey, @@ -1109,7 +1111,7 @@ AcquirerProxy) SyncManager.register('Condition', threading.Condition, ConditionProxy) SyncManager.register('Barrier', threading.Barrier, BarrierProxy) -SyncManager.register('Pool', Pool, PoolProxy) +SyncManager.register('Pool', pool.Pool, PoolProxy) SyncManager.register('list', list, ListProxy) SyncManager.register('dict', dict, DictProxy) SyncManager.register('Value', Value, ValueProxy) diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -7,7 +7,7 @@ # Licensed to PSF under a Contributor Agreement. # -__all__ = ['Pool'] +__all__ = ['Pool', 'ThreadPool'] # # Imports @@ -21,8 +21,10 @@ import time import traceback -from multiprocessing import Process, TimeoutError -from multiprocessing.util import Finalize, debug +# If threading is available then ThreadPool should be provided. Therefore +# we avoid top-level imports which are liable to fail on some systems. +from . import util +from . import Process, cpu_count, TimeoutError, SimpleQueue # # Constants representing the state of a pool @@ -104,11 +106,11 @@ try: task = get() except (EOFError, OSError): - debug('worker got EOFError or OSError -- exiting') + util.debug('worker got EOFError or OSError -- exiting') break if task is None: - debug('worker got sentinel -- exiting') + util.debug('worker got sentinel -- exiting') break job, i, func, args, kwds = task @@ -121,11 +123,11 @@ put((job, i, result)) except Exception as e: wrapped = MaybeEncodingError(e, result[1]) - debug("Possible encoding error while sending result: %s" % ( + util.debug("Possible encoding error while sending result: %s" % ( wrapped)) put((job, i, (False, wrapped))) completed += 1 - debug('worker exiting after %d tasks' % completed) + util.debug('worker exiting after %d tasks' % completed) # # Class representing a process pool @@ -184,7 +186,7 @@ self._result_handler._state = RUN self._result_handler.start() - self._terminate = Finalize( + self._terminate = util.Finalize( self, self._terminate_pool, args=(self._taskqueue, self._inqueue, self._outqueue, self._pool, self._worker_handler, self._task_handler, @@ -201,7 +203,7 @@ worker = self._pool[i] if worker.exitcode is not None: # worker exited - debug('cleaning up worker %d' % i) + util.debug('cleaning up worker %d' % i) worker.join() cleaned = True del self._pool[i] @@ -221,7 +223,7 @@ w.name = w.name.replace('Process', 'PoolWorker') w.daemon = True w.start() - debug('added worker') + util.debug('added worker') def _maintain_pool(self): """Clean up any exited workers and start replacements for them. 
@@ -230,7 +232,6 @@ self._repopulate_pool() def _setup_queues(self): - from .queues import SimpleQueue self._inqueue = SimpleQueue() self._outqueue = SimpleQueue() self._quick_put = self._inqueue._writer.send @@ -358,7 +359,7 @@ time.sleep(0.1) # send sentinel to stop workers pool._taskqueue.put(None) - debug('worker handler exiting') + util.debug('worker handler exiting') @staticmethod def _handle_tasks(taskqueue, put, outqueue, pool): @@ -368,36 +369,36 @@ i = -1 for i, task in enumerate(taskseq): if thread._state: - debug('task handler found thread._state != RUN') + util.debug('task handler found thread._state != RUN') break try: put(task) except OSError: - debug('could not put task on queue') + util.debug('could not put task on queue') break else: if set_length: - debug('doing set_length()') + util.debug('doing set_length()') set_length(i+1) continue break else: - debug('task handler got sentinel') + util.debug('task handler got sentinel') try: # tell result handler to finish when cache is empty - debug('task handler sending sentinel to result handler') + util.debug('task handler sending sentinel to result handler') outqueue.put(None) # tell workers there is no more work - debug('task handler sending sentinel to workers') + util.debug('task handler sending sentinel to workers') for p in pool: put(None) except OSError: - debug('task handler got OSError when sending sentinels') + util.debug('task handler got OSError when sending sentinels') - debug('task handler exiting') + util.debug('task handler exiting') @staticmethod def _handle_results(outqueue, get, cache): @@ -407,16 +408,16 @@ try: task = get() except (OSError, EOFError): - debug('result handler got EOFError/OSError -- exiting') + util.debug('result handler got EOFError/OSError -- exiting') return if thread._state: assert thread._state == TERMINATE - debug('result handler found thread._state=TERMINATE') + util.debug('result handler found thread._state=TERMINATE') break if task is None: - debug('result handler got sentinel') + util.debug('result handler got sentinel') break job, i, obj = task @@ -429,11 +430,11 @@ try: task = get() except (OSError, EOFError): - debug('result handler got EOFError/OSError -- exiting') + util.debug('result handler got EOFError/OSError -- exiting') return if task is None: - debug('result handler ignoring extra sentinel') + util.debug('result handler ignoring extra sentinel') continue job, i, obj = task try: @@ -442,7 +443,7 @@ pass if hasattr(outqueue, '_reader'): - debug('ensuring that outqueue is not full') + util.debug('ensuring that outqueue is not full') # If we don't make room available in outqueue then # attempts to add the sentinel (None) to outqueue may # block. There is guaranteed to be no more than 2 sentinels. 
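For orientation, the worker, task and result handler threads being touched here sit behind the ordinary pool API; a minimal usage sketch relying only on the documented interface (not part of this patch):

from multiprocessing.pool import ThreadPool

def square(x):
    return x * x

if __name__ == '__main__':
    with ThreadPool(4) as pool:             # Pool(4) behaves the same way
        print(pool.map(square, range(10)))  # [0, 1, 4, ..., 81]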
@@ -454,7 +455,7 @@ except (OSError, EOFError): pass - debug('result handler exiting: len(cache)=%s, thread._state=%s', + util.debug('result handler exiting: len(cache)=%s, thread._state=%s', len(cache), thread._state) @staticmethod @@ -472,19 +473,19 @@ ) def close(self): - debug('closing pool') + util.debug('closing pool') if self._state == RUN: self._state = CLOSE self._worker_handler._state = CLOSE def terminate(self): - debug('terminating pool') + util.debug('terminating pool') self._state = TERMINATE self._worker_handler._state = TERMINATE self._terminate() def join(self): - debug('joining pool') + util.debug('joining pool') assert self._state in (CLOSE, TERMINATE) self._worker_handler.join() self._task_handler.join() @@ -495,7 +496,7 @@ @staticmethod def _help_stuff_finish(inqueue, task_handler, size): # task_handler may be blocked trying to put items on inqueue - debug('removing tasks from inqueue until task handler finished') + util.debug('removing tasks from inqueue until task handler finished') inqueue._rlock.acquire() while task_handler.is_alive() and inqueue._reader.poll(): inqueue._reader.recv() @@ -505,12 +506,12 @@ def _terminate_pool(cls, taskqueue, inqueue, outqueue, pool, worker_handler, task_handler, result_handler, cache): # this is guaranteed to only be called once - debug('finalizing pool') + util.debug('finalizing pool') worker_handler._state = TERMINATE task_handler._state = TERMINATE - debug('helping task handler/workers to finish') + util.debug('helping task handler/workers to finish') cls._help_stuff_finish(inqueue, task_handler, len(pool)) assert result_handler.is_alive() or len(cache) == 0 @@ -520,31 +521,31 @@ # We must wait for the worker handler to exit before terminating # workers because we don't want workers to be restarted behind our back. - debug('joining worker handler') + util.debug('joining worker handler') if threading.current_thread() is not worker_handler: worker_handler.join() # Terminate workers which haven't already finished. 
if pool and hasattr(pool[0], 'terminate'): - debug('terminating workers') + util.debug('terminating workers') for p in pool: if p.exitcode is None: p.terminate() - debug('joining task handler') + util.debug('joining task handler') if threading.current_thread() is not task_handler: task_handler.join() - debug('joining result handler') + util.debug('joining result handler') if threading.current_thread() is not result_handler: result_handler.join() if pool and hasattr(pool[0], 'terminate'): - debug('joining pool workers') + util.debug('joining pool workers') for p in pool: if p.is_alive(): # worker has not yet exited - debug('cleaning up worker %d' % p.pid) + util.debug('cleaning up worker %d' % p.pid) p.join() def __enter__(self): @@ -730,7 +731,10 @@ class ThreadPool(Pool): - from .dummy import Process + @staticmethod + def Process(*args, **kwds): + from .dummy import Process + return Process(*args, **kwds) def __init__(self, processes=None, initializer=None, initargs=()): Pool.__init__(self, processes, initializer, initargs) diff --git a/Lib/multiprocessing/popen.py b/Lib/multiprocessing/popen.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/popen.py @@ -0,0 +1,78 @@ +import sys +import threading + +__all__ = ['Popen', 'get_spawning_popen', 'set_spawning_popen', + 'assert_spawning'] + +# +# Check that the current thread is spawning a child process +# + +_tls = threading.local() + +def get_spawning_popen(): + return getattr(_tls, 'spawning_popen', None) + +def set_spawning_popen(popen): + _tls.spawning_popen = popen + +def assert_spawning(obj): + if get_spawning_popen() is None: + raise RuntimeError( + '%s objects should only be shared between processes' + ' through inheritance' % type(obj).__name__ + ) + +# +# +# + +_Popen = None + +def Popen(process_obj): + if _Popen is None: + set_start_method() + return _Popen(process_obj) + +def get_start_method(): + if _Popen is None: + set_start_method() + return _Popen.method + +def set_start_method(meth=None, *, start_helpers=True): + global _Popen + try: + modname = _method_to_module[meth] + __import__(modname) + except (KeyError, ImportError): + raise ValueError('could not use start method %r' % meth) + module = sys.modules[modname] + if start_helpers: + module.Popen.ensure_helpers_running() + _Popen = module.Popen + + +if sys.platform == 'win32': + + _method_to_module = { + None: 'multiprocessing.popen_spawn_win32', + 'spawn': 'multiprocessing.popen_spawn_win32', + } + + def get_all_start_methods(): + return ['spawn'] + +else: + _method_to_module = { + None: 'multiprocessing.popen_fork', + 'fork': 'multiprocessing.popen_fork', + 'spawn': 'multiprocessing.popen_spawn_posix', + 'forkserver': 'multiprocessing.popen_forkserver', + } + + def get_all_start_methods(): + from . import reduction + if reduction.HAVE_SEND_HANDLE: + return ['fork', 'spawn', 'forkserver'] + else: + return ['fork', 'spawn'] diff --git a/Lib/multiprocessing/popen_fork.py b/Lib/multiprocessing/popen_fork.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/popen_fork.py @@ -0,0 +1,87 @@ +import os +import sys +import signal +import errno + +from . 
import util + +__all__ = ['Popen'] + +# +# Start child process using fork +# + +class Popen(object): + method = 'fork' + + def __init__(self, process_obj): + sys.stdout.flush() + sys.stderr.flush() + self.returncode = None + self._launch(process_obj) + + def duplicate_for_child(self, fd): + return fd + + def poll(self, flag=os.WNOHANG): + if self.returncode is None: + while True: + try: + pid, sts = os.waitpid(self.pid, flag) + except OSError as e: + if e.errno == errno.EINTR: + continue + # Child process not yet created. See #1731717 + # e.errno == errno.ECHILD == 10 + return None + else: + break + if pid == self.pid: + if os.WIFSIGNALED(sts): + self.returncode = -os.WTERMSIG(sts) + else: + assert os.WIFEXITED(sts) + self.returncode = os.WEXITSTATUS(sts) + return self.returncode + + def wait(self, timeout=None): + if self.returncode is None: + if timeout is not None: + from .connection import wait + if not wait([self.sentinel], timeout): + return None + # This shouldn't block if wait() returned successfully. + return self.poll(os.WNOHANG if timeout == 0.0 else 0) + return self.returncode + + def terminate(self): + if self.returncode is None: + try: + os.kill(self.pid, signal.SIGTERM) + except ProcessLookupError: + pass + except OSError: + if self.wait(timeout=0.1) is None: + raise + + def _launch(self, process_obj): + code = 1 + parent_r, child_w = util.pipe() + self.pid = os.fork() + if self.pid == 0: + try: + os.close(parent_r) + if 'random' in sys.modules: + import random + random.seed() + code = process_obj._bootstrap() + finally: + os._exit(code) + else: + os.close(child_w) + util.Finalize(self, os.close, (parent_r,)) + self.sentinel = parent_r + + @staticmethod + def ensure_helpers_running(): + pass diff --git a/Lib/multiprocessing/popen_forkserver.py b/Lib/multiprocessing/popen_forkserver.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/popen_forkserver.py @@ -0,0 +1,75 @@ +import io +import os + +from . import reduction +if not reduction.HAVE_SEND_HANDLE: + raise ImportError('No support for sending fds between processes') +from . import forkserver +from . import popen +from . import popen_fork +from . import spawn +from . 
import util + + +__all__ = ['Popen'] + +# +# Wrapper for an fd used while launching a process +# + +class _DupFd(object): + def __init__(self, ind): + self.ind = ind + def detach(self): + return forkserver.get_inherited_fds()[self.ind] + +# +# Start child process using a server process +# + +class Popen(popen_fork.Popen): + method = 'forkserver' + DupFd = _DupFd + + def __init__(self, process_obj): + self._fds = [] + super().__init__(process_obj) + + def duplicate_for_child(self, fd): + self._fds.append(fd) + return len(self._fds) - 1 + + def _launch(self, process_obj): + prep_data = spawn.get_preparation_data(process_obj._name) + buf = io.BytesIO() + popen.set_spawning_popen(self) + try: + reduction.dump(prep_data, buf) + reduction.dump(process_obj, buf) + finally: + popen.set_spawning_popen(None) + + self.sentinel, w = forkserver.connect_to_new_process(self._fds) + util.Finalize(self, os.close, (self.sentinel,)) + with open(w, 'wb', closefd=True) as f: + f.write(buf.getbuffer()) + self.pid = forkserver.read_unsigned(self.sentinel) + + def poll(self, flag=os.WNOHANG): + if self.returncode is None: + from .connection import wait + timeout = 0 if flag == os.WNOHANG else None + if not wait([self.sentinel], timeout): + return None + try: + self.returncode = forkserver.read_unsigned(self.sentinel) + except (OSError, EOFError): + # The process ended abnormally perhaps because of a signal + self.returncode = 255 + return self.returncode + + @staticmethod + def ensure_helpers_running(): + from . import semaphore_tracker + semaphore_tracker.ensure_running() + forkserver.ensure_running() diff --git a/Lib/multiprocessing/popen_spawn_posix.py b/Lib/multiprocessing/popen_spawn_posix.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/popen_spawn_posix.py @@ -0,0 +1,75 @@ +import fcntl +import io +import os + +from . import popen +from . import popen_fork +from . import reduction +from . import spawn +from . import util + +from . import current_process + +__all__ = ['Popen'] + + +# +# Wrapper for an fd used while launching a process +# + +class _DupFd(object): + def __init__(self, fd): + self.fd = fd + def detach(self): + return self.fd + +# +# Start child process using a fresh interpreter +# + +class Popen(popen_fork.Popen): + method = 'spawn' + DupFd = _DupFd + + def __init__(self, process_obj): + self._fds = [] + super().__init__(process_obj) + + def duplicate_for_child(self, fd): + self._fds.append(fd) + return fd + + def _launch(self, process_obj): + tracker_fd = current_process()._config['semaphore_tracker_fd'] + self._fds.append(tracker_fd) + prep_data = spawn.get_preparation_data(process_obj._name) + fp = io.BytesIO() + popen.set_spawning_popen(self) + try: + reduction.dump(prep_data, fp) + reduction.dump(process_obj, fp) + finally: + popen.set_spawning_popen(None) + + parent_r = child_w = child_r = parent_w = None + try: + parent_r, child_w = util.pipe() + child_r, parent_w = util.pipe() + cmd = spawn.get_command_line() + [str(child_r)] + self._fds.extend([child_r, child_w]) + self.pid = util.spawnv_passfds(spawn.get_executable(), + cmd, self._fds) + self.sentinel = parent_r + with open(parent_w, 'wb', closefd=False) as f: + f.write(fp.getbuffer()) + finally: + if parent_r is not None: + util.Finalize(self, os.close, (parent_r,)) + for fd in (child_r, child_w, parent_w): + if fd is not None: + os.close(fd) + + @staticmethod + def ensure_helpers_running(): + from . 
import semaphore_tracker + semaphore_tracker.ensure_running() diff --git a/Lib/multiprocessing/popen_spawn_win32.py b/Lib/multiprocessing/popen_spawn_win32.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/popen_spawn_win32.py @@ -0,0 +1,102 @@ +import os +import msvcrt +import signal +import sys +import _winapi + +from . import spawn +from . import popen +from . import reduction +from . import util + +__all__ = ['Popen'] + +# +# +# + +TERMINATE = 0x10000 +WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) +WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") + +# +# We define a Popen class similar to the one from subprocess, but +# whose constructor takes a process object as its argument. +# + +class Popen(object): + ''' + Start a subprocess to run the code of a process object + ''' + method = 'spawn' + + def __init__(self, process_obj): + prep_data = spawn.get_preparation_data(process_obj._name) + cmd = ' '.join('"%s"' % x for x in spawn.get_command_line()) + + # read end of pipe will be "stolen" by the child process + # -- see spawn_main() in spawn.py. + rhandle, whandle = _winapi.CreatePipe(None, 0) + wfd = msvcrt.open_osfhandle(whandle, 0) + cmd += ' {} {}'.format(os.getpid(), rhandle) + + with open(wfd, 'wb', closefd=True) as to_child: + # start process + try: + hp, ht, pid, tid = _winapi.CreateProcess( + spawn.get_executable(), cmd, + None, None, False, 0, None, None, None) + _winapi.CloseHandle(ht) + except: + _winapi.CloseHandle(rhandle) + raise + + # set attributes of self + self.pid = pid + self.returncode = None + self._handle = hp + self.sentinel = int(hp) + util.Finalize(self, _winapi.CloseHandle, (self.sentinel,)) + + # send information to child + popen.set_spawning_popen(self) + try: + reduction.dump(prep_data, to_child) + reduction.dump(process_obj, to_child) + finally: + popen.set_spawning_popen(None) + + def duplicate_for_child(self, handle): + assert self is popen.get_spawning_popen() + return reduction.duplicate(handle, self.sentinel) + + def wait(self, timeout=None): + if self.returncode is None: + if timeout is None: + msecs = _winapi.INFINITE + else: + msecs = max(0, int(timeout * 1000 + 0.5)) + + res = _winapi.WaitForSingleObject(int(self._handle), msecs) + if res == _winapi.WAIT_OBJECT_0: + code = _winapi.GetExitCodeProcess(self._handle) + if code == TERMINATE: + code = -signal.SIGTERM + self.returncode = code + + return self.returncode + + def poll(self): + return self.wait(timeout=0) + + def terminate(self): + if self.returncode is None: + try: + _winapi.TerminateProcess(int(self._handle), TERMINATE) + except OSError: + if self.wait(timeout=1.0) is None: + raise + + @staticmethod + def ensure_helpers_running(): + pass diff --git a/Lib/multiprocessing/process.py b/Lib/multiprocessing/process.py --- a/Lib/multiprocessing/process.py +++ b/Lib/multiprocessing/process.py @@ -43,7 +43,7 @@ Return list of process objects corresponding to live child processes ''' _cleanup() - return list(_current_process._children) + return list(_children) # # @@ -51,9 +51,9 @@ def _cleanup(): # check for processes which have finished - for p in list(_current_process._children): + for p in list(_children): if p._popen.poll() is not None: - _current_process._children.discard(p) + _children.discard(p) # # The `Process` class @@ -63,21 +63,16 @@ ''' Process objects represent activity that is run in a separate process - The class is analagous to `threading.Thread` + The class is analogous to `threading.Thread` ''' _Popen = None def __init__(self, 
group=None, target=None, name=None, args=(), kwargs={}, *, daemon=None): assert group is None, 'group argument must be None for now' - count = next(_current_process._counter) + count = next(_process_counter) self._identity = _current_process._identity + (count,) - self._authkey = _current_process._authkey - if daemon is not None: - self._daemonic = daemon - else: - self._daemonic = _current_process._daemonic - self._tempdir = _current_process._tempdir + self._config = _current_process._config.copy() self._parent_pid = os.getpid() self._popen = None self._target = target @@ -85,6 +80,8 @@ self._kwargs = dict(kwargs) self._name = name or type(self).__name__ + '-' + \ ':'.join(str(i) for i in self._identity) + if daemon is not None: + self.daemon = daemon _dangling.add(self) def run(self): @@ -101,16 +98,16 @@ assert self._popen is None, 'cannot start a process twice' assert self._parent_pid == os.getpid(), \ 'can only start a process object created by current process' - assert not _current_process._daemonic, \ + assert not _current_process._config.get('daemon'), \ 'daemonic processes are not allowed to have children' _cleanup() if self._Popen is not None: Popen = self._Popen else: - from .forking import Popen + from .popen import Popen self._popen = Popen(self) self._sentinel = self._popen.sentinel - _current_process._children.add(self) + _children.add(self) def terminate(self): ''' @@ -126,7 +123,7 @@ assert self._popen is not None, 'can only join a started process' res = self._popen.wait(timeout) if res is not None: - _current_process._children.discard(self) + _children.discard(self) def is_alive(self): ''' @@ -154,7 +151,7 @@ ''' Return whether process is a daemon ''' - return self._daemonic + return self._config.get('daemon', False) @daemon.setter def daemon(self, daemonic): @@ -162,18 +159,18 @@ Set whether process is a daemon ''' assert self._popen is None, 'process has already started' - self._daemonic = daemonic + self._config['daemon'] = daemonic @property def authkey(self): - return self._authkey + return self._config['authkey'] @authkey.setter def authkey(self, authkey): ''' Set authorization key of process ''' - self._authkey = AuthenticationString(authkey) + self._config['authkey'] = AuthenticationString(authkey) @property def exitcode(self): @@ -227,17 +224,17 @@ status = 'stopped[%s]' % _exitcode_to_name.get(status, status) return '<%s(%s, %s%s)>' % (type(self).__name__, self._name, - status, self._daemonic and ' daemon' or '') + status, self.daemon and ' daemon' or '') ## def _bootstrap(self): from . 
import util - global _current_process + global _current_process, _process_counter, _children try: - self._children = set() - self._counter = itertools.count(1) + _process_counter = itertools.count(1) + _children = set() if sys.stdin is not None: try: sys.stdin.close() @@ -285,8 +282,8 @@ class AuthenticationString(bytes): def __reduce__(self): - from .forking import Popen - if not Popen.thread_is_spawning(): + from .popen import get_spawning_popen + if get_spawning_popen() is None: raise TypeError( 'Pickling an AuthenticationString object is ' 'disallowed for security reasons' @@ -301,16 +298,19 @@ def __init__(self): self._identity = () - self._daemonic = False self._name = 'MainProcess' self._parent_pid = None self._popen = None - self._counter = itertools.count(1) - self._children = set() - self._authkey = AuthenticationString(os.urandom(32)) - self._tempdir = None + self._config = {'authkey': AuthenticationString(os.urandom(32)), + 'semprefix': 'mp'} + # Note that some versions of FreeBSD only allow named + # semaphores to have names of up to 14 characters. Therfore + # we choose a short prefix. + _current_process = _MainProcess() +_process_counter = itertools.count(1) +_children = set() del _MainProcess # diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py --- a/Lib/multiprocessing/queues.py +++ b/Lib/multiprocessing/queues.py @@ -18,11 +18,15 @@ import errno from queue import Empty, Full + import _multiprocessing -from multiprocessing.connection import Pipe -from multiprocessing.synchronize import Lock, BoundedSemaphore, Semaphore, Condition -from multiprocessing.util import debug, info, Finalize, register_after_fork -from multiprocessing.forking import assert_spawning, ForkingPickler + +from . import connection +from . import popen +from . 
import synchronize + +from .util import debug, info, Finalize, register_after_fork, is_exiting +from .reduction import ForkingPickler # # Queue type using a pipe, buffer and thread @@ -34,14 +38,14 @@ if maxsize <= 0: maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX self._maxsize = maxsize - self._reader, self._writer = Pipe(duplex=False) - self._rlock = Lock() + self._reader, self._writer = connection.Pipe(duplex=False) + self._rlock = synchronize.Lock() self._opid = os.getpid() if sys.platform == 'win32': self._wlock = None else: - self._wlock = Lock() - self._sem = BoundedSemaphore(maxsize) + self._wlock = synchronize.Lock() + self._sem = synchronize.BoundedSemaphore(maxsize) # For use by concurrent.futures self._ignore_epipe = False @@ -51,7 +55,7 @@ register_after_fork(self, Queue._after_fork) def __getstate__(self): - assert_spawning(self) + popen.assert_spawning(self) return (self._ignore_epipe, self._maxsize, self._reader, self._writer, self._rlock, self._wlock, self._sem, self._opid) @@ -208,8 +212,6 @@ @staticmethod def _feed(buffer, notempty, send_bytes, writelock, close, ignore_epipe): debug('starting thread to feed data to pipe') - from .util import is_exiting - nacquire = notempty.acquire nrelease = notempty.release nwait = notempty.wait @@ -279,8 +281,8 @@ def __init__(self, maxsize=0): Queue.__init__(self, maxsize) - self._unfinished_tasks = Semaphore(0) - self._cond = Condition() + self._unfinished_tasks = synchronize.Semaphore(0) + self._cond = synchronize.Condition() def __getstate__(self): return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks) @@ -331,19 +333,19 @@ class SimpleQueue(object): def __init__(self): - self._reader, self._writer = Pipe(duplex=False) - self._rlock = Lock() + self._reader, self._writer = connection.Pipe(duplex=False) + self._rlock = synchronize.Lock() self._poll = self._reader.poll if sys.platform == 'win32': self._wlock = None else: - self._wlock = Lock() + self._wlock = synchronize.Lock() def empty(self): return not self._poll() def __getstate__(self): - assert_spawning(self) + popen.assert_spawning(self) return (self._reader, self._writer, self._rlock, self._wlock) def __setstate__(self, state): diff --git a/Lib/multiprocessing/reduction.py b/Lib/multiprocessing/reduction.py --- a/Lib/multiprocessing/reduction.py +++ b/Lib/multiprocessing/reduction.py @@ -1,6 +1,5 @@ # -# Module to allow connection and socket objects to be transferred -# between processes +# Module which deals with pickling of objects. # # multiprocessing/reduction.py # @@ -8,27 +7,57 @@ # Licensed to PSF under a Contributor Agreement. # -__all__ = ['reduce_socket', 'reduce_connection', 'send_handle', 'recv_handle'] +import copyreg +import functools +import io +import os +import pickle +import socket +import sys -import os -import sys -import socket -import threading -import struct -import signal +from . import popen +from . 
import util -from multiprocessing import current_process -from multiprocessing.util import register_after_fork, debug, sub_debug -from multiprocessing.util import is_exiting, sub_warning +__all__ = ['send_handle', 'recv_handle', 'ForkingPickler', 'register', 'dump'] +HAVE_SEND_HANDLE = (sys.platform == 'win32' or + (hasattr(socket, 'CMSG_LEN') and + hasattr(socket, 'SCM_RIGHTS') and + hasattr(socket.socket, 'sendmsg'))) + # -# +# Pickler subclass # -if not(sys.platform == 'win32' or (hasattr(socket, 'CMSG_LEN') and - hasattr(socket, 'SCM_RIGHTS'))): - raise ImportError('pickling of connections not supported') +class ForkingPickler(pickle.Pickler): + '''Pickler subclass used by multiprocessing.''' + _extra_reducers = {} + _copyreg_dispatch_table = copyreg.dispatch_table + + def __init__(self, *args): + super().__init__(*args) + self.dispatch_table = self._copyreg_dispatch_table.copy() + self.dispatch_table.update(self._extra_reducers) + + @classmethod + def register(cls, type, reduce): + '''Register a reduce function for a type.''' + cls._extra_reducers[type] = reduce + + @classmethod + def dumps(cls, obj, protocol=None): + buf = io.BytesIO() + cls(buf, protocol).dump(obj) + return buf.getbuffer() + + loads = pickle.loads + +register = ForkingPickler.register + +def dump(obj, file, protocol=None): + '''Replacement for pickle.dump() using ForkingPickler.''' + ForkingPickler(file, protocol).dump(obj) # # Platform specific definitions @@ -36,20 +65,44 @@ if sys.platform == 'win32': # Windows - __all__ += ['reduce_pipe_connection'] + __all__ += ['DupHandle', 'duplicate', 'steal_handle'] import _winapi + def duplicate(handle, target_process=None, inheritable=False): + '''Duplicate a handle. (target_process is a handle not a pid!)''' + if target_process is None: + target_process = _winapi.GetCurrentProcess() + return _winapi.DuplicateHandle( + _winapi.GetCurrentProcess(), handle, target_process, + 0, inheritable, _winapi.DUPLICATE_SAME_ACCESS) + + def steal_handle(source_pid, handle): + '''Steal a handle from process identified by source_pid.''' + source_process_handle = _winapi.OpenProcess( + _winapi.PROCESS_DUP_HANDLE, False, source_pid) + try: + return _winapi.DuplicateHandle( + source_process_handle, handle, + _winapi.GetCurrentProcess(), 0, False, + _winapi.DUPLICATE_SAME_ACCESS | _winapi.DUPLICATE_CLOSE_SOURCE) + finally: + _winapi.CloseHandle(source_process_handle) + def send_handle(conn, handle, destination_pid): + '''Send a handle over a local connection.''' dh = DupHandle(handle, _winapi.DUPLICATE_SAME_ACCESS, destination_pid) conn.send(dh) def recv_handle(conn): + '''Receive a handle over a local connection.''' return conn.recv().detach() class DupHandle(object): + '''Picklable wrapper for a handle.''' def __init__(self, handle, access, pid=None): - # duplicate handle for process with given pid if pid is None: + # We just duplicate the handle in the current process and + # let the receiving process steal the handle. pid = os.getpid() proc = _winapi.OpenProcess(_winapi.PROCESS_DUP_HANDLE, False, pid) try: @@ -62,9 +115,12 @@ self._pid = pid def detach(self): + '''Get the handle. This should only be called once.''' # retrieve handle from process which currently owns it if self._pid == os.getpid(): + # The handle has already been duplicated for this process. return self._handle + # We must steal the handle from the process whose pid is self._pid. 
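For illustration only, the register()/dumps()/loads() helpers introduced in this module can be exercised as follows; the Point class and its reducer are invented for the example and are not part of the patch:

from multiprocessing.reduction import ForkingPickler, register

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

def _rebuild_point(x, y):
    return Point(x, y)

def _reduce_point(p):
    # a reducer returns (callable, args) so the unpickler can rebuild the object
    return _rebuild_point, (p.x, p.y)

register(Point, _reduce_point)

buf = ForkingPickler.dumps(Point(1, 2))   # bytes-like buffer
p = ForkingPickler.loads(buf)
assert (p.x, p.y) == (1, 2)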
proc = _winapi.OpenProcess(_winapi.PROCESS_DUP_HANDLE, False, self._pid) try: @@ -74,207 +130,112 @@ finally: _winapi.CloseHandle(proc) - class DupSocket(object): - def __init__(self, sock): - new_sock = sock.dup() - def send(conn, pid): - share = new_sock.share(pid) - conn.send_bytes(share) - self._id = resource_sharer.register(send, new_sock.close) - - def detach(self): - conn = resource_sharer.get_connection(self._id) - try: - share = conn.recv_bytes() - return socket.fromshare(share) - finally: - conn.close() - - def reduce_socket(s): - return rebuild_socket, (DupSocket(s),) - - def rebuild_socket(ds): - return ds.detach() - - def reduce_connection(conn): - handle = conn.fileno() - with socket.fromfd(handle, socket.AF_INET, socket.SOCK_STREAM) as s: - ds = DupSocket(s) - return rebuild_connection, (ds, conn.readable, conn.writable) - - def rebuild_connection(ds, readable, writable): - from .connection import Connection - sock = ds.detach() - return Connection(sock.detach(), readable, writable) - - def reduce_pipe_connection(conn): - access = ((_winapi.FILE_GENERIC_READ if conn.readable else 0) | - (_winapi.FILE_GENERIC_WRITE if conn.writable else 0)) - dh = DupHandle(conn.fileno(), access) - return rebuild_pipe_connection, (dh, conn.readable, conn.writable) - - def rebuild_pipe_connection(dh, readable, writable): - from .connection import PipeConnection - handle = dh.detach() - return PipeConnection(handle, readable, writable) - else: # Unix + __all__ += ['DupFd', 'sendfds', 'recvfds'] + import array # On MacOSX we should acknowledge receipt of fds -- see Issue14669 ACKNOWLEDGE = sys.platform == 'darwin' - def send_handle(conn, handle, destination_pid): - with socket.fromfd(conn.fileno(), socket.AF_UNIX, socket.SOCK_STREAM) as s: - s.sendmsg([b'x'], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, - struct.pack("@i", handle))]) - if ACKNOWLEDGE and conn.recv_bytes() != b'ACK': + def sendfds(sock, fds): + '''Send an array of fds over an AF_UNIX socket.''' + fds = array.array('i', fds) + msg = bytes([len(fds) % 256]) + sock.sendmsg([msg], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fds)]) + if ACKNOWLEDGE and sock.recv(1) != b'A': raise RuntimeError('did not receive acknowledgement of fd') + def recvfds(sock, size): + '''Receive an array of fds over an AF_UNIX socket.''' + a = array.array('i') + bytes_size = a.itemsize * size + msg, ancdata, flags, addr = sock.recvmsg(1, socket.CMSG_LEN(bytes_size)) + if not msg and not ancdata: + raise EOFError + try: + if ACKNOWLEDGE: + sock.send(b'A') + if len(ancdata) != 1: + raise RuntimeError('received %d items of ancdata' % + len(ancdata)) + cmsg_level, cmsg_type, cmsg_data = ancdata[0] + if (cmsg_level == socket.SOL_SOCKET and + cmsg_type == socket.SCM_RIGHTS): + if len(cmsg_data) % a.itemsize != 0: + raise ValueError + a.frombytes(cmsg_data) + assert len(a) % 256 == msg[0] + return list(a) + except (ValueError, IndexError): + pass + raise RuntimeError('Invalid data received') + + def send_handle(conn, handle, destination_pid): + '''Send a handle over a local connection.''' + with socket.fromfd(conn.fileno(), socket.AF_UNIX, socket.SOCK_STREAM) as s: + sendfds(s, [handle]) + def recv_handle(conn): - size = struct.calcsize("@i") + '''Receive a handle over a local connection.''' with socket.fromfd(conn.fileno(), socket.AF_UNIX, socket.SOCK_STREAM) as s: - msg, ancdata, flags, addr = s.recvmsg(1, socket.CMSG_LEN(size)) - try: - if ACKNOWLEDGE: - conn.send_bytes(b'ACK') - cmsg_level, cmsg_type, cmsg_data = ancdata[0] - if (cmsg_level == socket.SOL_SOCKET and - 
cmsg_type == socket.SCM_RIGHTS): - return struct.unpack("@i", cmsg_data[:size])[0] - except (ValueError, IndexError, struct.error): - pass - raise RuntimeError('Invalid data received') + return recvfds(s, 1)[0] - class DupFd(object): - def __init__(self, fd): - new_fd = os.dup(fd) - def send(conn, pid): - send_handle(conn, new_fd, pid) - def close(): - os.close(new_fd) - self._id = resource_sharer.register(send, close) - - def detach(self): - conn = resource_sharer.get_connection(self._id) - try: - return recv_handle(conn) - finally: - conn.close() - - def reduce_socket(s): - df = DupFd(s.fileno()) - return rebuild_socket, (df, s.family, s.type, s.proto) - - def rebuild_socket(df, family, type, proto): - fd = df.detach() - s = socket.fromfd(fd, family, type, proto) - os.close(fd) - return s - - def reduce_connection(conn): - df = DupFd(conn.fileno()) - return rebuild_connection, (df, conn.readable, conn.writable) - - def rebuild_connection(df, readable, writable): - from .connection import Connection - fd = df.detach() - return Connection(fd, readable, writable) + def DupFd(fd): + '''Return a wrapper for an fd.''' + popen_obj = popen.get_spawning_popen() + if popen_obj is not None: + return popen_obj.DupFd(popen_obj.duplicate_for_child(fd)) + elif HAVE_SEND_HANDLE: + from . import resource_sharer + return resource_sharer.DupFd(fd) + else: + raise ValueError('SCM_RIGHTS appears not to be available') # -# Server which shares registered resources with clients +# Try making some callable types picklable # -class ResourceSharer(object): - def __init__(self): - self._key = 0 - self._cache = {} - self._old_locks = [] - self._lock = threading.Lock() - self._listener = None - self._address = None - self._thread = None - register_after_fork(self, ResourceSharer._afterfork) +def _reduce_method(m): + if m.__self__ is None: + return getattr, (m.__class__, m.__func__.__name__) + else: + return getattr, (m.__self__, m.__func__.__name__) +class _C: + def f(self): + pass +register(type(_C().f), _reduce_method) - def register(self, send, close): - with self._lock: - if self._address is None: - self._start() - self._key += 1 - self._cache[self._key] = (send, close) - return (self._address, self._key) - @staticmethod - def get_connection(ident): - from .connection import Client - address, key = ident - c = Client(address, authkey=current_process().authkey) - c.send((key, os.getpid())) - return c +def _reduce_method_descriptor(m): + return getattr, (m.__objclass__, m.__name__) +register(type(list.append), _reduce_method_descriptor) +register(type(int.__add__), _reduce_method_descriptor) - def stop(self, timeout=None): - from .connection import Client - with self._lock: - if self._address is not None: - c = Client(self._address, authkey=current_process().authkey) - c.send(None) - c.close() - self._thread.join(timeout) - if self._thread.is_alive(): - sub_warn('ResourceSharer thread did not stop when asked') - self._listener.close() - self._thread = None - self._address = None - self._listener = None - for key, (send, close) in self._cache.items(): - close() - self._cache.clear() - def _afterfork(self): - for key, (send, close) in self._cache.items(): - close() - self._cache.clear() - # If self._lock was locked at the time of the fork, it may be broken - # -- see issue 6721. Replace it without letting it be gc'ed. 
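The broken-lock concern noted in the comment above (issue 6721) is what the register_after_fork() idiom addresses; a small sketch of that idiom, assuming only the multiprocessing.util.register_after_fork(obj, func) helper used throughout this patch (the Cache class is invented for the example):

import threading
from multiprocessing import util

class Cache:
    def __init__(self):
        self._lock = threading.Lock()
        # the callback runs as func(obj) in children created by multiprocessing
        util.register_after_fork(self, Cache._after_fork)

    def _after_fork(self):
        # a lock held across fork() may be unusable in the child,
        # so give the child a fresh one instead of trying to release it
        self._lock = threading.Lock()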
- self._old_locks.append(self._lock) - self._lock = threading.Lock() - if self._listener is not None: - self._listener.close() - self._listener = None - self._address = None - self._thread = None +def _reduce_partial(p): + return _rebuild_partial, (p.func, p.args, p.keywords or {}) +def _rebuild_partial(func, args, keywords): + return functools.partial(func, *args, **keywords) +register(functools.partial, _reduce_partial) - def _start(self): - from .connection import Listener - assert self._listener is None - debug('starting listener and thread for sending handles') - self._listener = Listener(authkey=current_process().authkey) - self._address = self._listener.address - t = threading.Thread(target=self._serve) - t.daemon = True - t.start() - self._thread = t +# +# Make sockets picklable +# - def _serve(self): - if hasattr(signal, 'pthread_sigmask'): - signal.pthread_sigmask(signal.SIG_BLOCK, range(1, signal.NSIG)) - while 1: - try: - conn = self._listener.accept() - msg = conn.recv() - if msg is None: - break - key, destination_pid = msg - send, close = self._cache.pop(key) - send(conn, destination_pid) - close() - conn.close() - except: - if not is_exiting(): - import traceback - sub_warning( - 'thread for sharing handles raised exception :\n' + - '-'*79 + '\n' + traceback.format_exc() + '-'*79 - ) +if sys.platform == 'win32': + def _reduce_socket(s): + from .resource_sharer import DupSocket + return _rebuild_socket, (DupSocket(s),) + def _rebuild_socket(ds): + return ds.detach() + register(socket.socket, _reduce_socket) -resource_sharer = ResourceSharer() +else: + def _reduce_socket(s): + df = DupFd(s.fileno()) + return _rebuild_socket, (df, s.family, s.type, s.proto) + def _rebuild_socket(df, family, type, proto): + fd = df.detach() + return socket.socket(family, type, proto, fileno=fd) + register(socket.socket, _reduce_socket) diff --git a/Lib/multiprocessing/resource_sharer.py b/Lib/multiprocessing/resource_sharer.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/resource_sharer.py @@ -0,0 +1,158 @@ +# +# We use a background thread for sharing fds on Unix, and for sharing sockets on +# Windows. +# +# A client which wants to pickle a resource registers it with the resource +# sharer and gets an identifier in return. The unpickling process will connect +# to the resource sharer, sends the identifier and its pid, and then receives +# the resource. +# + +import os +import signal +import socket +import sys +import threading + +from . import process +from . import reduction +from . import util + +__all__ = ['stop'] + + +if sys.platform == 'win32': + __all__ += ['DupSocket'] + + class DupSocket(object): + '''Picklable wrapper for a socket.''' + def __init__(self, sock): + new_sock = sock.dup() + def send(conn, pid): + share = new_sock.share(pid) + conn.send_bytes(share) + self._id = _resource_sharer.register(send, new_sock.close) + + def detach(self): + '''Get the socket. This should only be called once.''' + with _resource_sharer.get_connection(self._id) as conn: + share = conn.recv_bytes() + return socket.fromshare(share) + +else: + __all__ += ['DupFd'] + + class DupFd(object): + '''Wrapper for fd which can be used at any time.''' + def __init__(self, fd): + new_fd = os.dup(fd) + def send(conn, pid): + reduction.send_handle(conn, new_fd, pid) + def close(): + os.close(new_fd) + self._id = _resource_sharer.register(send, close) + + def detach(self): + '''Get the fd. 
This should only be called once.''' + with _resource_sharer.get_connection(self._id) as conn: + return reduction.recv_handle(conn) + + +class _ResourceSharer(object): + '''Manager for resouces using background thread.''' + def __init__(self): + self._key = 0 + self._cache = {} + self._old_locks = [] + self._lock = threading.Lock() + self._listener = None + self._address = None + self._thread = None + util.register_after_fork(self, _ResourceSharer._afterfork) + + def register(self, send, close): + '''Register resource, returning an identifier.''' + with self._lock: + if self._address is None: + self._start() + self._key += 1 + self._cache[self._key] = (send, close) + return (self._address, self._key) + + @staticmethod + def get_connection(ident): + '''Return connection from which to receive identified resource.''' + from .connection import Client + address, key = ident + c = Client(address, authkey=process.current_process().authkey) + c.send((key, os.getpid())) + return c + + def stop(self, timeout=None): + '''Stop the background thread and clear registered resources.''' + from .connection import Client + with self._lock: + if self._address is not None: + c = Client(self._address, + authkey=process.current_process().authkey) + c.send(None) + c.close() + self._thread.join(timeout) + if self._thread.is_alive(): + util.sub_warning('_ResourceSharer thread did ' + 'not stop when asked') + self._listener.close() + self._thread = None + self._address = None + self._listener = None + for key, (send, close) in self._cache.items(): + close() + self._cache.clear() + + def _afterfork(self): + for key, (send, close) in self._cache.items(): + close() + self._cache.clear() + # If self._lock was locked at the time of the fork, it may be broken + # -- see issue 6721. Replace it without letting it be gc'ed. + self._old_locks.append(self._lock) + self._lock = threading.Lock() + if self._listener is not None: + self._listener.close() + self._listener = None + self._address = None + self._thread = None + + def _start(self): + from .connection import Listener + assert self._listener is None + util.debug('starting listener and thread for sending handles') + self._listener = Listener(authkey=process.current_process().authkey) + self._address = self._listener.address + t = threading.Thread(target=self._serve) + t.daemon = True + t.start() + self._thread = t + + def _serve(self): + if hasattr(signal, 'pthread_sigmask'): + signal.pthread_sigmask(signal.SIG_BLOCK, range(1, signal.NSIG)) + while 1: + try: + with self._listener.accept() as conn: + msg = conn.recv() + if msg is None: + break + key, destination_pid = msg + send, close = self._cache.pop(key) + try: + send(conn, destination_pid) + finally: + close() + except: + if not util.is_exiting(): + sys.excepthook(*sys.exc_info()) + + +_resource_sharer = _ResourceSharer() +stop = _resource_sharer.stop diff --git a/Lib/multiprocessing/semaphore_tracker.py b/Lib/multiprocessing/semaphore_tracker.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/semaphore_tracker.py @@ -0,0 +1,135 @@ +# +# On Unix we run a server process which keeps track of unlinked +# semaphores. The server ignores SIGINT and SIGTERM and reads from a +# pipe. Every other process of the program has a copy of the writable +# end of the pipe, so we get EOF when all other processes have exited. +# Then the server process unlinks any remaining semaphore names. 
+# +# This is important because the system only supports a limited number +# of named semaphores, and they will not be automatically removed till +# the next reboot. Without this semaphore tracker process, "killall +# python" would probably leave unlinked semaphores. +# + +import errno +import os +import signal +import sys +import threading +import warnings +import _multiprocessing + +from . import spawn +from . import util +from . import current_process + +__all__ = ['ensure_running', 'register', 'unregister'] + + +_lock = threading.Lock() + + +def ensure_running(): + '''Make sure that semaphore tracker process is running. + + This can be run from any process. Usually a child process will use + the semaphore created by its parent.''' + with _lock: + config = current_process()._config + if config.get('semaphore_tracker_fd') is not None: + return + fds_to_pass = [] + try: + fds_to_pass.append(sys.stderr.fileno()) + except Exception: + pass + cmd = 'from multiprocessing.semaphore_tracker import main; main(%d)' + r, semaphore_tracker_fd = util.pipe() + try: + fds_to_pass.append(r) + # process will out live us, so no need to wait on pid + exe = spawn.get_executable() + args = [exe] + util._args_from_interpreter_flags() + args += ['-c', cmd % r] + util.spawnv_passfds(exe, args, fds_to_pass) + except: + os.close(semaphore_tracker_fd) + raise + else: + config['semaphore_tracker_fd'] = semaphore_tracker_fd + finally: + os.close(r) + + +def register(name): + '''Register name of semaphore with semaphore tracker.''' + _send('REGISTER', name) + + +def unregister(name): + '''Unregister name of semaphore with semaphore tracker.''' + _send('UNREGISTER', name) + + +def _send(cmd, name): + msg = '{0}:{1}\n'.format(cmd, name).encode('ascii') + if len(name) > 512: + # posix guarantees that writes to a pipe of less than PIPE_BUF + # bytes are atomic, and that PIPE_BUF >= 512 + raise ValueError('name too long') + fd = current_process()._config['semaphore_tracker_fd'] + nbytes = os.write(fd, msg) + assert nbytes == len(msg) + + +def main(fd): + '''Run semaphore tracker.''' + # protect the process from ^C and "killall python" etc + signal.signal(signal.SIGINT, signal.SIG_IGN) + signal.signal(signal.SIGTERM, signal.SIG_IGN) + + for f in (sys.stdin, sys.stdout): + try: + f.close() + except Exception: + pass + + cache = set() + try: + # keep track of registered/unregistered semaphores + with open(fd, 'rb') as f: + for line in f: + try: + cmd, name = line.strip().split(b':') + if cmd == b'REGISTER': + cache.add(name) + elif cmd == b'UNREGISTER': + cache.remove(name) + else: + raise RuntimeError('unrecognized command %r' % cmd) + except Exception: + try: + sys.excepthook(*sys.exc_info()) + except: + pass + finally: + # all processes have terminated; cleanup any remaining semaphores + if cache: + try: + warnings.warn('semaphore_tracker: There appear to be %d ' + 'leaked semaphores to clean up at shutdown' % + len(cache)) + except Exception: + pass + for name in cache: + # For some reason the process which created and registered this + # semaphore has failed to unregister it. Presumably it has died. + # We therefore unlink it. 
+ try: + name = name.decode('ascii') + try: + _multiprocessing.sem_unlink(name) + except Exception as e: + warnings.warn('semaphore_tracker: %r: %s' % (name, e)) + finally: + pass diff --git a/Lib/multiprocessing/sharedctypes.py b/Lib/multiprocessing/sharedctypes.py --- a/Lib/multiprocessing/sharedctypes.py +++ b/Lib/multiprocessing/sharedctypes.py @@ -10,8 +10,11 @@ import ctypes import weakref -from multiprocessing import heap, RLock -from multiprocessing.forking import assert_spawning, ForkingPickler +from . import heap + +from .synchronize import RLock +from .reduction import ForkingPickler +from .popen import assert_spawning __all__ = ['RawValue', 'RawArray', 'Value', 'Array', 'copy', 'synchronized'] diff --git a/Lib/multiprocessing/spawn.py b/Lib/multiprocessing/spawn.py new file mode 100644 --- /dev/null +++ b/Lib/multiprocessing/spawn.py @@ -0,0 +1,258 @@ +# +# Code used to start processes when using the spawn or forkserver +# start methods. +# +# multiprocessing/spawn.py +# +# Copyright (c) 2006-2008, R Oudkerk +# Licensed to PSF under a Contributor Agreement. +# + +import os +import pickle +import sys + +from . import process +from . import util +from . import popen + +__all__ = ['_main', 'freeze_support', 'set_executable', 'get_executable', + 'get_preparation_data', 'get_command_line', 'import_main_path'] + +# +# _python_exe is the assumed path to the python executable. +# People embedding Python want to modify it. +# + +if sys.platform != 'win32': + WINEXE = False + WINSERVICE = False +else: + WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) + WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") + +if WINSERVICE: + _python_exe = os.path.join(sys.exec_prefix, 'python.exe') +else: + _python_exe = sys.executable + +def set_executable(exe): + global _python_exe + _python_exe = exe + +def get_executable(): + return _python_exe + +# +# +# + +def is_forking(argv): + ''' + Return whether commandline indicates we are forking + ''' + if len(argv) >= 2 and argv[1] == '--multiprocessing-fork': + return True + else: + return False + + +def freeze_support(): + ''' + Run code for process object if this in not the main process + ''' + if is_forking(sys.argv): + main() + sys.exit() + + +def get_command_line(): + ''' + Returns prefix of command line used for spawning a child process + ''' + if getattr(sys, 'frozen', False): + return [sys.executable, '--multiprocessing-fork'] + else: + prog = 'from multiprocessing.spawn import spawn_main; spawn_main()' + opts = util._args_from_interpreter_flags() + return [_python_exe] + opts + ['-c', prog, '--multiprocessing-fork'] + + +def spawn_main(): + ''' + Run code specifed by data received over pipe + ''' + assert is_forking(sys.argv) + handle = int(sys.argv[-1]) + if sys.platform == 'win32': + import msvcrt + from .reduction import steal_handle + pid = int(sys.argv[-2]) + new_handle = steal_handle(pid, handle) + fd = msvcrt.open_osfhandle(new_handle, os.O_RDONLY) + else: + fd = handle + exitcode = _main(fd) + sys.exit(exitcode) + + +def _main(fd): + with os.fdopen(fd, 'rb', closefd=True) as from_parent: + process.current_process()._inheriting = True + try: + preparation_data = pickle.load(from_parent) + prepare(preparation_data) + self = pickle.load(from_parent) + finally: + del process.current_process()._inheriting + return self._bootstrap() + + +def _check_not_importing_main(): + if getattr(process.current_process(), '_inheriting', False): + raise RuntimeError(''' + An attempt has been made to start a new process before 
the + current process has finished its bootstrapping phase. + + This probably means that you are not using fork to start your + child processes and you have forgotten to use the proper idiom + in the main module: + + if __name__ == '__main__': + freeze_support() + ... + + The "freeze_support()" line can be omitted if the program + is not going to be frozen to produce an executable.''') + + +def get_preparation_data(name): + ''' + Return info about parent needed by child to unpickle process object + ''' + _check_not_importing_main() + d = dict( + log_to_stderr=util._log_to_stderr, + authkey=process.current_process().authkey, + ) + + if util._logger is not None: + d['log_level'] = util._logger.getEffectiveLevel() + + sys_path=sys.path.copy() + try: + i = sys_path.index('') + except ValueError: + pass + else: + sys_path[i] = process.ORIGINAL_DIR + + d.update( + name=name, + sys_path=sys_path, + sys_argv=sys.argv, + orig_dir=process.ORIGINAL_DIR, + dir=os.getcwd(), + start_method=popen.get_start_method(), + ) + + if sys.platform != 'win32' or (not WINEXE and not WINSERVICE): + main_path = getattr(sys.modules['__main__'], '__file__', None) + if not main_path and sys.argv[0] not in ('', '-c'): + main_path = sys.argv[0] + if main_path is not None: + if (not os.path.isabs(main_path) and + process.ORIGINAL_DIR is not None): + main_path = os.path.join(process.ORIGINAL_DIR, main_path) + d['main_path'] = os.path.normpath(main_path) + + return d + +# +# Prepare current process +# + +old_main_modules = [] + +def prepare(data): + ''' + Try to get current process ready to unpickle process object + ''' + if 'name' in data: + process.current_process().name = data['name'] + + if 'authkey' in data: + process.current_process().authkey = data['authkey'] + + if 'log_to_stderr' in data and data['log_to_stderr']: + util.log_to_stderr() + + if 'log_level' in data: + util.get_logger().setLevel(data['log_level']) + + if 'sys_path' in data: + sys.path = data['sys_path'] + + if 'sys_argv' in data: + sys.argv = data['sys_argv'] + + if 'dir' in data: + os.chdir(data['dir']) + + if 'orig_dir' in data: + process.ORIGINAL_DIR = data['orig_dir'] + + if 'start_method' in data: + popen.set_start_method(data['start_method'], start_helpers=False) + + if 'main_path' in data: + import_main_path(data['main_path']) + + +def import_main_path(main_path): + ''' + Set sys.modules['__main__'] to module at main_path + ''' + # XXX (ncoghlan): The following code makes several bogus + # assumptions regarding the relationship between __file__ + # and a module's real name. See PEP 302 and issue #10845 + if getattr(sys.modules['__main__'], '__file__', None) == main_path: + return + + main_name = os.path.splitext(os.path.basename(main_path))[0] + if main_name == '__init__': + main_name = os.path.basename(os.path.dirname(main_path)) + + if main_name == '__main__': + main_module = sys.modules['__main__'] + main_module.__file__ = main_path + elif main_name != 'ipython': + # Main modules not actually called __main__.py may + # contain additional code that should still be executed + import importlib + import types + + if main_path is None: + dirs = None + elif os.path.basename(main_path).startswith('__init__.py'): + dirs = [os.path.dirname(os.path.dirname(main_path))] + else: + dirs = [os.path.dirname(main_path)] + + assert main_name not in sys.modules, main_name + sys.modules.pop('__mp_main__', None) + # We should not try to load __main__ + # since that would execute 'if __name__ == "__main__"' + # clauses, potentially causing a psuedo fork bomb. 
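
The idiom that the error message and the comment above insist on is the usual one; a minimal script that is safe under the new spawn and forkserver start methods would look roughly like this (an illustrative sketch, not part of the patch):

    import multiprocessing

    def work(q):
        q.put('hello from the child')

    if __name__ == '__main__':
        multiprocessing.set_start_method('spawn')   # or 'forkserver'
        q = multiprocessing.Queue()
        p = multiprocessing.Process(target=work, args=(q,))
        p.start()
        print(q.get())
        p.join()

Anything placed outside the guard is re-executed when a child re-imports the main module, which is exactly the pseudo fork bomb the comment warns about.
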
+ loader = importlib.find_loader(main_name, path=dirs) + main_module = types.ModuleType(main_name) + try: + loader.init_module_attrs(main_module) + except AttributeError: # init_module_attrs is optional + pass + main_module.__name__ = '__mp_main__' + code = loader.get_code(main_name) + exec(code, main_module.__dict__) + + old_main_modules.append(sys.modules['__main__']) + sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py --- a/Lib/multiprocessing/synchronize.py +++ b/Lib/multiprocessing/synchronize.py @@ -11,20 +11,24 @@ 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event' ] +import os import threading import sys +import itertools +import tempfile +import _multiprocessing -import _multiprocessing -from multiprocessing.process import current_process -from multiprocessing.util import register_after_fork, debug -from multiprocessing.forking import assert_spawning, Popen from time import time as _time +from . import popen +from . import process +from . import util + # Try to import the mp.synchronize module cleanly, if it fails # raise ImportError for platforms lacking a working sem_open implementation. # See issue 3770 try: - from _multiprocessing import SemLock + from _multiprocessing import SemLock, sem_unlink except (ImportError): raise ImportError("This platform lacks a functioning sem_open" + " implementation, therefore, the required" + @@ -44,15 +48,45 @@ class SemLock(object): + _rand = tempfile._RandomNameSequence() + def __init__(self, kind, value, maxvalue): - sl = self._semlock = _multiprocessing.SemLock(kind, value, maxvalue) - debug('created semlock with handle %s' % sl.handle) + unlink_immediately = (sys.platform == 'win32' or + popen.get_start_method() == 'fork') + for i in range(100): + try: + sl = self._semlock = _multiprocessing.SemLock( + kind, value, maxvalue, self._make_name(), + unlink_immediately) + except FileExistsError: + pass + else: + break + else: + raise FileExistsError('cannot find name for semaphore') + + util.debug('created semlock with handle %s' % sl.handle) self._make_methods() if sys.platform != 'win32': def _after_fork(obj): obj._semlock._after_fork() - register_after_fork(self, _after_fork) + util.register_after_fork(self, _after_fork) + + if self._semlock.name is not None: + # We only get here if we are on Unix with forking + # disabled. 
When the object is garbage collected or the + # process shuts down we unlink the semaphore name + from .semaphore_tracker import register + register(self._semlock.name) + util.Finalize(self, SemLock._cleanup, (self._semlock.name,), + exitpriority=0) + + @staticmethod + def _cleanup(name): + from .semaphore_tracker import unregister + sem_unlink(name) + unregister(name) def _make_methods(self): self.acquire = self._semlock.acquire @@ -65,15 +99,24 @@ return self._semlock.__exit__(*args) def __getstate__(self): - assert_spawning(self) + popen.assert_spawning(self) sl = self._semlock - return (Popen.duplicate_for_child(sl.handle), sl.kind, sl.maxvalue) + if sys.platform == 'win32': + h = popen.get_spawning_popen().duplicate_for_child(sl.handle) + else: + h = sl.handle + return (h, sl.kind, sl.maxvalue, sl.name) def __setstate__(self, state): self._semlock = _multiprocessing.SemLock._rebuild(*state) - debug('recreated blocker with handle %r' % state[0]) + util.debug('recreated blocker with handle %r' % state[0]) self._make_methods() + @staticmethod + def _make_name(): + return '/%s-%s' % (process.current_process()._config['semprefix'], + next(SemLock._rand)) + # # Semaphore # @@ -122,7 +165,7 @@ def __repr__(self): try: if self._semlock._is_mine(): - name = current_process().name + name = process.current_process().name if threading.current_thread().name != 'MainThread': name += '|' + threading.current_thread().name elif self._semlock._get_value() == 1: @@ -147,7 +190,7 @@ def __repr__(self): try: if self._semlock._is_mine(): - name = current_process().name + name = process.current_process().name if threading.current_thread().name != 'MainThread': name += '|' + threading.current_thread().name count = self._semlock._count() @@ -175,7 +218,7 @@ self._make_methods() def __getstate__(self): - assert_spawning(self) + popen.assert_spawning(self) return (self._lock, self._sleeping_count, self._woken_count, self._wait_semaphore) @@ -342,7 +385,7 @@ def __init__(self, parties, action=None, timeout=None): import struct - from multiprocessing.heap import BufferWrapper + from .heap import BufferWrapper wrapper = BufferWrapper(struct.calcsize('i') * 2) cond = Condition() self.__setstate__((parties, action, timeout, cond, wrapper)) diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py --- a/Lib/multiprocessing/util.py +++ b/Lib/multiprocessing/util.py @@ -17,13 +17,13 @@ # cleanup function before multiprocessing does from subprocess import _args_from_interpreter_flags -from multiprocessing.process import current_process, active_children +from . 
import process __all__ = [ 'sub_debug', 'debug', 'info', 'sub_warning', 'get_logger', 'log_to_stderr', 'get_temp_dir', 'register_after_fork', 'is_exiting', 'Finalize', 'ForkAwareThreadLock', 'ForkAwareLocal', - 'SUBDEBUG', 'SUBWARNING', + 'close_all_fds_except', 'SUBDEBUG', 'SUBWARNING', ] # @@ -71,8 +71,6 @@ _logger = logging.getLogger(LOGGER_NAME) _logger.propagate = 0 - logging.addLevelName(SUBDEBUG, 'SUBDEBUG') - logging.addLevelName(SUBWARNING, 'SUBWARNING') # XXX multiprocessing should cleanup before logging if hasattr(atexit, 'unregister'): @@ -111,13 +109,14 @@ def get_temp_dir(): # get name of a temp directory which will be automatically cleaned up - if current_process()._tempdir is None: + tempdir = process.current_process()._config.get('tempdir') + if tempdir is None: import shutil, tempfile tempdir = tempfile.mkdtemp(prefix='pymp-') info('created temp directory %s', tempdir) Finalize(None, shutil.rmtree, args=[tempdir], exitpriority=-100) - current_process()._tempdir = tempdir - return current_process()._tempdir + process.current_process()._config['tempdir'] = tempdir + return tempdir # # Support for reinitialization of objects when bootstrapping a child process @@ -273,8 +272,8 @@ _exiting = False def _exit_function(info=info, debug=debug, _run_finalizers=_run_finalizers, - active_children=active_children, - current_process=current_process): + active_children=process.active_children, + current_process=process.current_process): # We hold on to references to functions in the arglist due to the # situation described below, where this function is called after this # module's globals are destroyed. @@ -303,7 +302,7 @@ # #9207. for p in active_children(): - if p._daemonic: + if p.daemon: info('calling terminate() for daemon %s', p.name) p._popen.terminate() @@ -335,3 +334,54 @@ register_after_fork(self, lambda obj : obj.__dict__.clear()) def __reduce__(self): return type(self), () + +# +# Close fds except those specified +# + +try: + MAXFD = os.sysconf("SC_OPEN_MAX") +except Exception: + MAXFD = 256 + +def close_all_fds_except(fds): + fds = list(fds) + [-1, MAXFD] + fds.sort() + assert fds[-1] == MAXFD, 'fd too large' + for i in range(len(fds) - 1): + os.closerange(fds[i]+1, fds[i+1]) + +# +# Start a program with only specified fds kept open +# + +def spawnv_passfds(path, args, passfds): + import _posixsubprocess, fcntl + passfds = sorted(passfds) + tmp = [] + # temporarily unset CLOEXEC on passed fds + for fd in passfds: + flag = fcntl.fcntl(fd, fcntl.F_GETFD) + if flag & fcntl.FD_CLOEXEC: + fcntl.fcntl(fd, fcntl.F_SETFD, flag & ~fcntl.FD_CLOEXEC) + tmp.append((fd, flag)) + errpipe_read, errpipe_write = _posixsubprocess.cloexec_pipe() + try: + return _posixsubprocess.fork_exec( + args, [os.fsencode(path)], True, passfds, None, None, + -1, -1, -1, -1, -1, -1, errpipe_read, errpipe_write, + False, False, None) + finally: + os.close(errpipe_read) + os.close(errpipe_write) + # reset CLOEXEC where necessary + for fd, flag in tmp: + fcntl.fcntl(fd, fcntl.F_SETFD, flag) + +# +# Return pipe with CLOEXEC set on fds +# + +def pipe(): + import _posixsubprocess + return _posixsubprocess.cloexec_pipe() diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/_test_multiprocessing.py rename from Lib/test/test_multiprocessing.py rename to Lib/test/_test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -43,7 +43,7 @@ try: from multiprocessing import reduction - HAS_REDUCTION = True + HAS_REDUCTION = reduction.HAVE_SEND_HANDLE except ImportError: 
HAS_REDUCTION = False @@ -99,6 +99,9 @@ except: MAXFD = 256 +# To speed up tests when using the forkserver, we can preload these: +PRELOAD = ['__main__', 'test.test_multiprocessing_forkserver'] + # # Some tests require ctypes # @@ -330,7 +333,6 @@ @classmethod def _test_recursion(cls, wconn, id): - from multiprocessing import forking wconn.send(id) if len(id) < 2: for i in range(2): @@ -378,7 +380,7 @@ self.assertFalse(wait_for_handle(sentinel, timeout=0.0)) event.set() p.join() - self.assertTrue(wait_for_handle(sentinel, timeout=DELTA)) + self.assertTrue(wait_for_handle(sentinel, timeout=1)) # # @@ -2493,7 +2495,7 @@ @classmethod def tearDownClass(cls): - from multiprocessing.reduction import resource_sharer + from multiprocessing import resource_sharer resource_sharer.stop(timeout=5) @classmethod @@ -2807,30 +2809,40 @@ # Test that from ... import * works for each module # -class _TestImportStar(BaseTestCase): - - ALLOWED_TYPES = ('processes',) +class _TestImportStar(unittest.TestCase): + + def get_module_names(self): + import glob + folder = os.path.dirname(multiprocessing.__file__) + pattern = os.path.join(folder, '*.py') + files = glob.glob(pattern) + modules = [os.path.splitext(os.path.split(f)[1])[0] for f in files] + modules = ['multiprocessing.' + m for m in modules] + modules.remove('multiprocessing.__init__') + modules.append('multiprocessing') + return modules def test_import(self): - modules = [ - 'multiprocessing', 'multiprocessing.connection', - 'multiprocessing.heap', 'multiprocessing.managers', - 'multiprocessing.pool', 'multiprocessing.process', - 'multiprocessing.synchronize', 'multiprocessing.util' - ] - - if HAS_REDUCTION: - modules.append('multiprocessing.reduction') - - if c_int is not None: + modules = self.get_module_names() + if sys.platform == 'win32': + modules.remove('multiprocessing.popen_fork') + modules.remove('multiprocessing.popen_forkserver') + modules.remove('multiprocessing.popen_spawn_posix') + else: + modules.remove('multiprocessing.popen_spawn_win32') + if not HAS_REDUCTION: + modules.remove('multiprocessing.popen_forkserver') + + if c_int is None: # This module requires _ctypes - modules.append('multiprocessing.sharedctypes') + modules.remove('multiprocessing.sharedctypes') for name in modules: __import__(name) mod = sys.modules[name] - - for attr in getattr(mod, '__all__', ()): + self.assertTrue(hasattr(mod, '__all__'), name) + + for attr in mod.__all__: self.assertTrue( hasattr(mod, attr), '%r does not have attribute %r' % (mod, attr) @@ -2953,131 +2965,6 @@ self.assertRaises((ValueError, OSError), multiprocessing.connection.Connection, -1) -# -# Functions used to create test cases from the base ones in this module -# - -def create_test_cases(Mixin, type): - result = {} - glob = globals() - Type = type.capitalize() - ALL_TYPES = {'processes', 'threads', 'manager'} - - for name in list(glob.keys()): - if name.startswith('_Test'): - base = glob[name] - assert set(base.ALLOWED_TYPES) <= ALL_TYPES, set(base.ALLOWED_TYPES) - if type in base.ALLOWED_TYPES: - newname = 'With' + Type + name[1:] - class Temp(base, Mixin, unittest.TestCase): - pass - result[newname] = Temp - Temp.__name__ = Temp.__qualname__ = newname - Temp.__module__ = Mixin.__module__ - return result - -# -# Create test cases -# - -class ProcessesMixin(object): - TYPE = 'processes' - Process = multiprocessing.Process - connection = multiprocessing.connection - current_process = staticmethod(multiprocessing.current_process) - active_children = staticmethod(multiprocessing.active_children) 
- Pool = staticmethod(multiprocessing.Pool) - Pipe = staticmethod(multiprocessing.Pipe) - Queue = staticmethod(multiprocessing.Queue) - JoinableQueue = staticmethod(multiprocessing.JoinableQueue) - Lock = staticmethod(multiprocessing.Lock) - RLock = staticmethod(multiprocessing.RLock) - Semaphore = staticmethod(multiprocessing.Semaphore) - BoundedSemaphore = staticmethod(multiprocessing.BoundedSemaphore) - Condition = staticmethod(multiprocessing.Condition) - Event = staticmethod(multiprocessing.Event) - Barrier = staticmethod(multiprocessing.Barrier) - Value = staticmethod(multiprocessing.Value) - Array = staticmethod(multiprocessing.Array) - RawValue = staticmethod(multiprocessing.RawValue) - RawArray = staticmethod(multiprocessing.RawArray) - -testcases_processes = create_test_cases(ProcessesMixin, type='processes') -globals().update(testcases_processes) - - -class ManagerMixin(object): - TYPE = 'manager' - Process = multiprocessing.Process - Queue = property(operator.attrgetter('manager.Queue')) - JoinableQueue = property(operator.attrgetter('manager.JoinableQueue')) - Lock = property(operator.attrgetter('manager.Lock')) - RLock = property(operator.attrgetter('manager.RLock')) - Semaphore = property(operator.attrgetter('manager.Semaphore')) - BoundedSemaphore = property(operator.attrgetter('manager.BoundedSemaphore')) - Condition = property(operator.attrgetter('manager.Condition')) - Event = property(operator.attrgetter('manager.Event')) - Barrier = property(operator.attrgetter('manager.Barrier')) - Value = property(operator.attrgetter('manager.Value')) - Array = property(operator.attrgetter('manager.Array')) - list = property(operator.attrgetter('manager.list')) - dict = property(operator.attrgetter('manager.dict')) - Namespace = property(operator.attrgetter('manager.Namespace')) - - @classmethod - def Pool(cls, *args, **kwds): - return cls.manager.Pool(*args, **kwds) - - @classmethod - def setUpClass(cls): - cls.manager = multiprocessing.Manager() - - @classmethod - def tearDownClass(cls): - # only the manager process should be returned by active_children() - # but this can take a bit on slow machines, so wait a few seconds - # if there are other children too (see #17395) - t = 0.01 - while len(multiprocessing.active_children()) > 1 and t < 5: - time.sleep(t) - t *= 2 - gc.collect() # do garbage collection - if cls.manager._number_of_objects() != 0: - # This is not really an error since some tests do not - # ensure that all processes which hold a reference to a - # managed object have been joined. 
- print('Shared objects which still exist at manager shutdown:') - print(cls.manager._debug_info()) - cls.manager.shutdown() - cls.manager.join() - cls.manager = None - -testcases_manager = create_test_cases(ManagerMixin, type='manager') -globals().update(testcases_manager) - - -class ThreadsMixin(object): - TYPE = 'threads' - Process = multiprocessing.dummy.Process - connection = multiprocessing.dummy.connection - current_process = staticmethod(multiprocessing.dummy.current_process) - active_children = staticmethod(multiprocessing.dummy.active_children) - Pool = staticmethod(multiprocessing.Pool) - Pipe = staticmethod(multiprocessing.dummy.Pipe) - Queue = staticmethod(multiprocessing.dummy.Queue) - JoinableQueue = staticmethod(multiprocessing.dummy.JoinableQueue) - Lock = staticmethod(multiprocessing.dummy.Lock) - RLock = staticmethod(multiprocessing.dummy.RLock) - Semaphore = staticmethod(multiprocessing.dummy.Semaphore) - BoundedSemaphore = staticmethod(multiprocessing.dummy.BoundedSemaphore) - Condition = staticmethod(multiprocessing.dummy.Condition) - Event = staticmethod(multiprocessing.dummy.Event) - Barrier = staticmethod(multiprocessing.dummy.Barrier) - Value = staticmethod(multiprocessing.dummy.Value) - Array = staticmethod(multiprocessing.dummy.Array) - -testcases_threads = create_test_cases(ThreadsMixin, type='threads') -globals().update(testcases_threads) class OtherTest(unittest.TestCase): @@ -3427,7 +3314,7 @@ def test_flags(self): import json, subprocess # start child process using unusual flags - prog = ('from test.test_multiprocessing import TestFlags; ' + + prog = ('from test._test_multiprocessing import TestFlags; ' + 'TestFlags.run_in_child()') data = subprocess.check_output( [sys.executable, '-E', '-S', '-O', '-c', prog]) @@ -3474,13 +3361,14 @@ class TestNoForkBomb(unittest.TestCase): def test_noforkbomb(self): + sm = multiprocessing.get_start_method() name = os.path.join(os.path.dirname(__file__), 'mp_fork_bomb.py') - if WIN32: - rc, out, err = test.script_helper.assert_python_failure(name) + if sm != 'fork': + rc, out, err = test.script_helper.assert_python_failure(name, sm) self.assertEqual('', out.decode('ascii')) self.assertIn('RuntimeError', err.decode('ascii')) else: - rc, out, err = test.script_helper.assert_python_ok(name) + rc, out, err = test.script_helper.assert_python_ok(name, sm) self.assertEqual('123', out.decode('ascii').rstrip()) self.assertEqual('', err.decode('ascii')) @@ -3514,6 +3402,72 @@ self.assertLessEqual(new_size, old_size) # +# Check that non-forked child processes do not inherit unneeded fds/handles +# + +class TestCloseFds(unittest.TestCase): + + def get_high_socket_fd(self): + if WIN32: + # The child process will not have any socket handles, so + # calling socket.fromfd() should produce WSAENOTSOCK even + # if there is a handle of the same number. + return socket.socket().detach() + else: + # We want to produce a socket with an fd high enough that a + # freshly created child process will not have any fds as high. 
+ fd = socket.socket().detach() + to_close = [] + while fd < 50: + to_close.append(fd) + fd = os.dup(fd) + for x in to_close: + os.close(x) + return fd + + def close(self, fd): + if WIN32: + socket.socket(fileno=fd).close() + else: + os.close(fd) + + @classmethod + def _test_closefds(cls, conn, fd): + try: + s = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM) + except Exception as e: + conn.send(e) + else: + s.close() + conn.send(None) + + def test_closefd(self): + if not HAS_REDUCTION: + raise unittest.SkipTest('requires fd pickling') + + reader, writer = multiprocessing.Pipe() + fd = self.get_high_socket_fd() + try: + p = multiprocessing.Process(target=self._test_closefds, + args=(writer, fd)) + p.start() + writer.close() + e = reader.recv() + p.join(timeout=5) + finally: + self.close(fd) + writer.close() + reader.close() + + if multiprocessing.get_start_method() == 'fork': + self.assertIs(e, None) + else: + WSAENOTSOCK = 10038 + self.assertIsInstance(e, OSError) + self.assertTrue(e.errno == errno.EBADF or + e.winerror == WSAENOTSOCK, e) + +# # Issue #17097: EINTR should be ignored by recv(), send(), accept() etc # @@ -3557,10 +3511,10 @@ def handler(signum, frame): pass signal.signal(signal.SIGUSR1, handler) - l = multiprocessing.connection.Listener() - conn.send(l.address) - a = l.accept() - a.send('welcome') + with multiprocessing.connection.Listener() as l: + conn.send(l.address) + a = l.accept() + a.send('welcome') @unittest.skipUnless(hasattr(signal, 'SIGUSR1'), 'requires SIGUSR1') def test_ignore_listener(self): @@ -3581,26 +3535,221 @@ finally: conn.close() +class TestStartMethod(unittest.TestCase): + def test_set_get(self): + multiprocessing.set_forkserver_preload(PRELOAD) + count = 0 + old_method = multiprocessing.get_start_method() + try: + for method in ('fork', 'spawn', 'forkserver'): + try: + multiprocessing.set_start_method(method) + except ValueError: + continue + self.assertEqual(multiprocessing.get_start_method(), method) + count += 1 + finally: + multiprocessing.set_start_method(old_method) + self.assertGreaterEqual(count, 1) + + def test_get_all(self): + methods = multiprocessing.get_all_start_methods() + if sys.platform == 'win32': + self.assertEqual(methods, ['spawn']) + else: + self.assertTrue(methods == ['fork', 'spawn'] or + methods == ['fork', 'spawn', 'forkserver']) + # +# Check that killing process does not leak named semaphores # + + at unittest.skipIf(sys.platform == "win32", + "test semantics don't make sense on Windows") +class TestSemaphoreTracker(unittest.TestCase): + def test_semaphore_tracker(self): + import subprocess + cmd = '''if 1: + import multiprocessing as mp, time, os + mp.set_start_method("spawn") + lock1 = mp.Lock() + lock2 = mp.Lock() + os.write(%d, lock1._semlock.name.encode("ascii") + b"\\n") + os.write(%d, lock2._semlock.name.encode("ascii") + b"\\n") + time.sleep(10) + ''' + print("\nTestSemaphoreTracker will output warnings a bit like:\n" + " ... There appear to be 2 leaked semaphores" + " to clean up at shutdown\n" + " ... 
'/mp-03jgqz': [Errno 2] No such file or directory", + file=sys.stderr) + r, w = os.pipe() + p = subprocess.Popen([sys.executable, + #'-W', 'ignore:semaphore_tracker', + '-c', cmd % (w, w)], + pass_fds=[w]) + os.close(w) + with open(r, 'rb', closefd=True) as f: + name1 = f.readline().rstrip().decode('ascii') + name2 = f.readline().rstrip().decode('ascii') + _multiprocessing.sem_unlink(name1) + p.terminate() + p.wait() + time.sleep(1.0) + with self.assertRaises(OSError) as ctx: + _multiprocessing.sem_unlink(name2) + # docs say it should be ENOENT, but OSX seems to give EINVAL + self.assertIn(ctx.exception.errno, (errno.ENOENT, errno.EINVAL)) + # - -def setUpModule(): - if sys.platform.startswith("linux"): +# Mixins +# + +class ProcessesMixin(object): + TYPE = 'processes' + Process = multiprocessing.Process + connection = multiprocessing.connection + current_process = staticmethod(multiprocessing.current_process) + active_children = staticmethod(multiprocessing.active_children) + Pool = staticmethod(multiprocessing.Pool) + Pipe = staticmethod(multiprocessing.Pipe) + Queue = staticmethod(multiprocessing.Queue) + JoinableQueue = staticmethod(multiprocessing.JoinableQueue) + Lock = staticmethod(multiprocessing.Lock) + RLock = staticmethod(multiprocessing.RLock) + Semaphore = staticmethod(multiprocessing.Semaphore) + BoundedSemaphore = staticmethod(multiprocessing.BoundedSemaphore) + Condition = staticmethod(multiprocessing.Condition) + Event = staticmethod(multiprocessing.Event) + Barrier = staticmethod(multiprocessing.Barrier) + Value = staticmethod(multiprocessing.Value) + Array = staticmethod(multiprocessing.Array) + RawValue = staticmethod(multiprocessing.RawValue) + RawArray = staticmethod(multiprocessing.RawArray) + + +class ManagerMixin(object): + TYPE = 'manager' + Process = multiprocessing.Process + Queue = property(operator.attrgetter('manager.Queue')) + JoinableQueue = property(operator.attrgetter('manager.JoinableQueue')) + Lock = property(operator.attrgetter('manager.Lock')) + RLock = property(operator.attrgetter('manager.RLock')) + Semaphore = property(operator.attrgetter('manager.Semaphore')) + BoundedSemaphore = property(operator.attrgetter('manager.BoundedSemaphore')) + Condition = property(operator.attrgetter('manager.Condition')) + Event = property(operator.attrgetter('manager.Event')) + Barrier = property(operator.attrgetter('manager.Barrier')) + Value = property(operator.attrgetter('manager.Value')) + Array = property(operator.attrgetter('manager.Array')) + list = property(operator.attrgetter('manager.list')) + dict = property(operator.attrgetter('manager.dict')) + Namespace = property(operator.attrgetter('manager.Namespace')) + + @classmethod + def Pool(cls, *args, **kwds): + return cls.manager.Pool(*args, **kwds) + + @classmethod + def setUpClass(cls): + cls.manager = multiprocessing.Manager() + + @classmethod + def tearDownClass(cls): + # only the manager process should be returned by active_children() + # but this can take a bit on slow machines, so wait a few seconds + # if there are other children too (see #17395) + t = 0.01 + while len(multiprocessing.active_children()) > 1 and t < 5: + time.sleep(t) + t *= 2 + gc.collect() # do garbage collection + if cls.manager._number_of_objects() != 0: + # This is not really an error since some tests do not + # ensure that all processes which hold a reference to a + # managed object have been joined. 
+ print('Shared objects which still exist at manager shutdown:') + print(cls.manager._debug_info()) + cls.manager.shutdown() + cls.manager.join() + cls.manager = None + + +class ThreadsMixin(object): + TYPE = 'threads' + Process = multiprocessing.dummy.Process + connection = multiprocessing.dummy.connection + current_process = staticmethod(multiprocessing.dummy.current_process) + active_children = staticmethod(multiprocessing.dummy.active_children) + Pool = staticmethod(multiprocessing.Pool) + Pipe = staticmethod(multiprocessing.dummy.Pipe) + Queue = staticmethod(multiprocessing.dummy.Queue) + JoinableQueue = staticmethod(multiprocessing.dummy.JoinableQueue) + Lock = staticmethod(multiprocessing.dummy.Lock) + RLock = staticmethod(multiprocessing.dummy.RLock) + Semaphore = staticmethod(multiprocessing.dummy.Semaphore) + BoundedSemaphore = staticmethod(multiprocessing.dummy.BoundedSemaphore) + Condition = staticmethod(multiprocessing.dummy.Condition) + Event = staticmethod(multiprocessing.dummy.Event) + Barrier = staticmethod(multiprocessing.dummy.Barrier) + Value = staticmethod(multiprocessing.dummy.Value) + Array = staticmethod(multiprocessing.dummy.Array) + +# +# Functions used to create test cases from the base ones in this module +# + +def install_tests_in_module_dict(remote_globs, start_method): + __module__ = remote_globs['__name__'] + local_globs = globals() + ALL_TYPES = {'processes', 'threads', 'manager'} + + for name, base in local_globs.items(): + if not isinstance(base, type): + continue + if issubclass(base, BaseTestCase): + if base is BaseTestCase: + continue + assert set(base.ALLOWED_TYPES) <= ALL_TYPES, base.ALLOWED_TYPES + for type_ in base.ALLOWED_TYPES: + newname = 'With' + type_.capitalize() + name[1:] + Mixin = local_globs[type_.capitalize() + 'Mixin'] + class Temp(base, Mixin, unittest.TestCase): + pass + Temp.__name__ = Temp.__qualname__ = newname + Temp.__module__ = __module__ + remote_globs[newname] = Temp + elif issubclass(base, unittest.TestCase): + class Temp(base, object): + pass + Temp.__name__ = Temp.__qualname__ = name + Temp.__module__ = __module__ + remote_globs[name] = Temp + + def setUpModule(): + multiprocessing.set_forkserver_preload(PRELOAD) + remote_globs['old_start_method'] = multiprocessing.get_start_method() try: - lock = multiprocessing.RLock() - except OSError: - raise unittest.SkipTest("OSError raises on RLock creation, " - "see issue 3111!") - check_enough_semaphores() - util.get_temp_dir() # creates temp directory for use by all processes - multiprocessing.get_logger().setLevel(LOG_LEVEL) - - -def tearDownModule(): - # pause a bit so we don't get warning about dangling threads/processes - time.sleep(0.5) - - -if __name__ == '__main__': - unittest.main() + multiprocessing.set_start_method(start_method) + except ValueError: + raise unittest.SkipTest(start_method + + ' start method not supported') + print('Using start method %r' % multiprocessing.get_start_method()) + + if sys.platform.startswith("linux"): + try: + lock = multiprocessing.RLock() + except OSError: + raise unittest.SkipTest("OSError raises on RLock creation, " + "see issue 3111!") + check_enough_semaphores() + util.get_temp_dir() # creates temp directory + multiprocessing.get_logger().setLevel(LOG_LEVEL) + + def tearDownModule(): + multiprocessing.set_start_method(remote_globs['old_start_method']) + # pause a bit so we don't get warning about dangling threads/processes + time.sleep(0.5) + + remote_globs['setUpModule'] = setUpModule + remote_globs['tearDownModule'] = tearDownModule 
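
The preload hook used in setUpModule() above is part of the new public API as well; in application code the pattern would look something like the following sketch (the module name passed to set_forkserver_preload() is made up for illustration):

    import multiprocessing

    def ping():
        print('child says hi')

    if __name__ == '__main__':
        if 'forkserver' in multiprocessing.get_all_start_methods():
            multiprocessing.set_start_method('forkserver')
            # imported once by the fork server, so every child gets these
            # modules without paying the import cost again
            multiprocessing.set_forkserver_preload(['myapp.heavy_imports'])
        else:
            multiprocessing.set_start_method('spawn')
        p = multiprocessing.Process(target=ping)
        p.start()
        p.join()
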
diff --git a/Lib/test/mp_fork_bomb.py b/Lib/test/mp_fork_bomb.py --- a/Lib/test/mp_fork_bomb.py +++ b/Lib/test/mp_fork_bomb.py @@ -7,6 +7,11 @@ # correctly on Windows. However, we should get a RuntimeError rather # than the Windows equivalent of a fork bomb. +if len(sys.argv) > 1: + multiprocessing.set_start_method(sys.argv[1]) +else: + multiprocessing.set_start_method('spawn') + p = multiprocessing.Process(target=foo) p.start() p.join() diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py --- a/Lib/test/regrtest.py +++ b/Lib/test/regrtest.py @@ -149,7 +149,7 @@ except ImportError: threading = None try: - import multiprocessing.process + import _multiprocessing, multiprocessing.process except ImportError: multiprocessing = None diff --git a/Lib/test/test_multiprocessing_fork.py b/Lib/test/test_multiprocessing_fork.py new file mode 100644 --- /dev/null +++ b/Lib/test/test_multiprocessing_fork.py @@ -0,0 +1,7 @@ +import unittest +import test._test_multiprocessing + +test._test_multiprocessing.install_tests_in_module_dict(globals(), 'fork') + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_multiprocessing_forkserver.py b/Lib/test/test_multiprocessing_forkserver.py new file mode 100644 --- /dev/null +++ b/Lib/test/test_multiprocessing_forkserver.py @@ -0,0 +1,7 @@ +import unittest +import test._test_multiprocessing + +test._test_multiprocessing.install_tests_in_module_dict(globals(), 'forkserver') + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_multiprocessing_spawn.py b/Lib/test/test_multiprocessing_spawn.py new file mode 100644 --- /dev/null +++ b/Lib/test/test_multiprocessing_spawn.py @@ -0,0 +1,7 @@ +import unittest +import test._test_multiprocessing + +test._test_multiprocessing.install_tests_in_module_dict(globals(), 'spawn') + +if __name__ == '__main__': + unittest.main() diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -938,7 +938,9 @@ QUICKTESTOPTS= $(TESTOPTS) -x test_subprocess test_io test_lib2to3 \ test_multibytecodec test_urllib2_localnet test_itertools \ - test_multiprocessing test_mailbox test_socket test_poll \ + test_multiprocessing_fork test_multiprocessing_spawn \ + test_multiprocessing_forkserver \ + test_mailbox test_socket test_poll \ test_select test_zipfile test_concurrent_futures quicktest: all platform $(TESTRUNNER) $(QUICKTESTOPTS) diff --git a/Modules/_multiprocessing/multiprocessing.c b/Modules/_multiprocessing/multiprocessing.c --- a/Modules/_multiprocessing/multiprocessing.c +++ b/Modules/_multiprocessing/multiprocessing.c @@ -126,6 +126,7 @@ {"recv", multiprocessing_recv, METH_VARARGS, ""}, {"send", multiprocessing_send, METH_VARARGS, ""}, #endif + {"sem_unlink", _PyMp_sem_unlink, METH_VARARGS, ""}, {NULL} }; diff --git a/Modules/_multiprocessing/multiprocessing.h b/Modules/_multiprocessing/multiprocessing.h --- a/Modules/_multiprocessing/multiprocessing.h +++ b/Modules/_multiprocessing/multiprocessing.h @@ -98,5 +98,6 @@ */ extern PyTypeObject _PyMp_SemLockType; +extern PyObject *_PyMp_sem_unlink(PyObject *ignore, PyObject *args); #endif /* MULTIPROCESSING_H */ diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c --- a/Modules/_multiprocessing/semaphore.c +++ b/Modules/_multiprocessing/semaphore.c @@ -18,6 +18,7 @@ int count; int maxvalue; int kind; + char *name; } SemLockObject; #define ISMINE(o) (o->count > 0 && PyThread_get_thread_ident() == o->last_tid) @@ -397,7 +398,8 @@ */ static PyObject * 
-newsemlockobject(PyTypeObject *type, SEM_HANDLE handle, int kind, int maxvalue) +newsemlockobject(PyTypeObject *type, SEM_HANDLE handle, int kind, int maxvalue, + char *name) { SemLockObject *self; @@ -409,21 +411,22 @@ self->count = 0; self->last_tid = 0; self->maxvalue = maxvalue; + self->name = name; return (PyObject*)self; } static PyObject * semlock_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { - char buffer[256]; SEM_HANDLE handle = SEM_FAILED; - int kind, maxvalue, value; + int kind, maxvalue, value, unlink; PyObject *result; - static char *kwlist[] = {"kind", "value", "maxvalue", NULL}; - static int counter = 0; + char *name, *name_copy = NULL; + static char *kwlist[] = {"kind", "value", "maxvalue", "name", "unlink", + NULL}; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "iii", kwlist, - &kind, &value, &maxvalue)) + if (!PyArg_ParseTupleAndKeywords(args, kwds, "iiisi", kwlist, + &kind, &value, &maxvalue, &name, &unlink)) return NULL; if (kind != RECURSIVE_MUTEX && kind != SEMAPHORE) { @@ -431,18 +434,23 @@ return NULL; } - PyOS_snprintf(buffer, sizeof(buffer), "/mp%ld-%d", (long)getpid(), counter++); + if (!unlink) { + name_copy = PyMem_Malloc(strlen(name) + 1); + if (name_copy == NULL) + goto failure; + strcpy(name_copy, name); + } SEM_CLEAR_ERROR(); - handle = SEM_CREATE(buffer, value, maxvalue); + handle = SEM_CREATE(name, value, maxvalue); /* On Windows we should fail if GetLastError()==ERROR_ALREADY_EXISTS */ if (handle == SEM_FAILED || SEM_GET_LAST_ERROR() != 0) goto failure; - if (SEM_UNLINK(buffer) < 0) + if (unlink && SEM_UNLINK(name) < 0) goto failure; - result = newsemlockobject(type, handle, kind, maxvalue); + result = newsemlockobject(type, handle, kind, maxvalue, name_copy); if (!result) goto failure; @@ -451,6 +459,7 @@ failure: if (handle != SEM_FAILED) SEM_CLOSE(handle); + PyMem_Free(name_copy); _PyMp_SetError(NULL, MP_STANDARD_ERROR); return NULL; } @@ -460,12 +469,30 @@ { SEM_HANDLE handle; int kind, maxvalue; + char *name, *name_copy = NULL; - if (!PyArg_ParseTuple(args, F_SEM_HANDLE "ii", - &handle, &kind, &maxvalue)) + if (!PyArg_ParseTuple(args, F_SEM_HANDLE "iiz", + &handle, &kind, &maxvalue, &name)) return NULL; - return newsemlockobject(type, handle, kind, maxvalue); + if (name != NULL) { + name_copy = PyMem_Malloc(strlen(name) + 1); + if (name_copy == NULL) + return PyErr_NoMemory(); + strcpy(name_copy, name); + } + +#ifndef MS_WINDOWS + if (name != NULL) { + handle = sem_open(name, 0); + if (handle == SEM_FAILED) { + PyMem_Free(name_copy); + return PyErr_SetFromErrno(PyExc_OSError); + } + } +#endif + + return newsemlockobject(type, handle, kind, maxvalue, name_copy); } static void @@ -473,6 +500,7 @@ { if (self->handle != SEM_FAILED) SEM_CLOSE(self->handle); + PyMem_Free(self->name); PyObject_Del(self); } @@ -574,6 +602,8 @@ ""}, {"maxvalue", T_INT, offsetof(SemLockObject, maxvalue), READONLY, ""}, + {"name", T_STRING, offsetof(SemLockObject, name), READONLY, + ""}, {NULL} }; @@ -621,3 +651,23 @@ /* tp_alloc */ 0, /* tp_new */ semlock_new, }; + +/* + * Function to unlink semaphore names + */ + +PyObject * +_PyMp_sem_unlink(PyObject *ignore, PyObject *args) +{ + char *name; + + if (!PyArg_ParseTuple(args, "s", &name)) + return NULL; + + if (SEM_UNLINK(name) < 0) { + _PyMp_SetError(NULL, MP_STANDARD_ERROR); + return NULL; + } + + Py_RETURN_NONE; +} -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 20:35:41 2013 From: python-checkins at python.org (david.wolever) Date: Wed, 14 Aug 2013 20:35:41 +0200 
(CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzE4?= =?utf-8?q?=3A_datetime_documentation_contradictory_on_leap_second_support?= Message-ID: <3cFfcY4tP5z7Lk0@mail.python.org> http://hg.python.org/cpython/rev/5063dab96843 changeset: 85170:5063dab96843 branch: 2.7 parent: 85168:ac2f59a6637f user: David Wolever date: Wed Aug 14 14:33:54 2013 -0400 summary: Issue #18718: datetime documentation contradictory on leap second support files: Doc/library/datetime.rst | 10 +++------- Misc/NEWS | 3 +++ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1664,7 +1664,7 @@ | ``%M`` | Minute as a zero-padded | 00, 01, ..., 59 | | | | decimal number. | | | +-----------+--------------------------------+------------------------+-------+ -| ``%S`` | Second as a zero-padded | 00, 01, ..., 61 | \(3) | +| ``%S`` | Second as a zero-padded | 00, 01, ..., 59 | \(3) | | | decimal number. | | | +-----------+--------------------------------+------------------------+-------+ | ``%f`` | Microsecond as a decimal | 000000, 000001, ..., | \(4) | @@ -1728,12 +1728,8 @@ the output hour field if the ``%I`` directive is used to parse the hour. (3) - The range really is ``0`` to ``61``; according to the Posix standard this - accounts for leap seconds and the (very rare) double leap seconds. - The :mod:`time` module may produce and does accept leap seconds since - it is based on the Posix standard, but the :mod:`datetime` module - does not accept leap seconds in :meth:`strptime` input nor will it - produce them in :func:`strftime` output. + Unlike the :mod:`time` module, the :mod:`datetime` module does not support + leap seconds. (4) ``%f`` is an extension to the set of format characters in the C standard diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -166,7 +166,10 @@ Documentation ------------- +- Issue #18718: datetime documentation contradictory on leap second support. + - Issue #17701: Improving strftime documentation. + - Issue #17844: Refactor a documentation of Python specific encodings. Add links to encoders and decoders for binary-to-binary codecs. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 20:43:50 2013 From: python-checkins at python.org (david.wolever) Date: Wed, 14 Aug 2013 20:43:50 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Remove_errant_fourth_=27?= =?utf-8?q?=2E=27_from_ellipsis_in_datetime_documentation=2E?= Message-ID: <3cFfny0z5YzNfc@mail.python.org> http://hg.python.org/cpython/rev/d48faf442569 changeset: 85171:d48faf442569 parent: 85169:3b82e0d83bf9 user: David Wolever date: Wed Aug 14 14:41:48 2013 -0400 summary: Remove errant fourth '.' from ellipsis in datetime documentation. files: Doc/library/datetime.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1828,7 +1828,7 @@ | | zero-padded decimal number. | | | +-----------+--------------------------------+------------------------+-------+ | ``%Y`` | Year with century as a decimal | 0001, 0002, ..., 2013, | \(2) | -| | number. | 2014, ...., 9998, 9999 | | +| | number. | 2014, ..., 9998, 9999 | | +-----------+--------------------------------+------------------------+-------+ | ``%H`` | Hour (24-hour clock) as a | 00, 01, ..., 23 | | | | zero-padded decimal number. 
| | | -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 20:43:51 2013 From: python-checkins at python.org (david.wolever) Date: Wed, 14 Aug 2013 20:43:51 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Remove_errant_?= =?utf-8?q?fourth_=27=2E=27_from_ellipsis_in_datetime_documentation=2E?= Message-ID: <3cFfnz3yWWz7Ljy@mail.python.org> http://hg.python.org/cpython/rev/b7a9c14fc082 changeset: 85172:b7a9c14fc082 branch: 3.3 parent: 85166:8f9bc9283400 user: David Wolever date: Wed Aug 14 14:41:48 2013 -0400 summary: Remove errant fourth '.' from ellipsis in datetime documentation. files: Doc/library/datetime.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -1406,7 +1406,7 @@ Return a string representing the time, controlled by an explicit format string. For a complete list of formatting directives, see - :ref:`strftime-strptime-behavior`. + :ref:`strftime-strptime-behavior`. .. method:: time.__format__(format) @@ -1826,7 +1826,7 @@ | | zero-padded decimal number. | | | +-----------+--------------------------------+------------------------+-------+ | ``%Y`` | Year with century as a decimal | 0001, 0002, ..., 2013, | \(2) | -| | number. | 2014, ...., 9998, 9999 | | +| | number. | 2014, ..., 9998, 9999 | | +-----------+--------------------------------+------------------------+-------+ | ``%H`` | Hour (24-hour clock) as a | 00, 01, ..., 23 | | | | zero-padded decimal number. | | | -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 14 20:52:26 2013 From: python-checkins at python.org (brett.cannon) Date: Wed, 14 Aug 2013 20:52:26 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?devguide=3A_Record_Donald_Stufft=27s_?= =?utf-8?q?access_privileges?= Message-ID: <3cFfzt3q2jz7LjX@mail.python.org> http://hg.python.org/devguide/rev/ab034cbd357d changeset: 636:ab034cbd357d user: Brett Cannon date: Wed Aug 14 14:52:20 2013 -0400 summary: Record Donald Stufft's access privileges files: developers.rst | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/developers.rst b/developers.rst --- a/developers.rst +++ b/developers.rst @@ -24,6 +24,9 @@ Permissions History ------------------- +- Donald Stufft was given push privileges on Aug 14 2013 by BAC, for PEP + editing, on the recommendation of Nick Coghlan. + - Ethan Furman was given push privileges on May 11 2013 by BAC, for PEP 435 work, on the recommendation of Eli Bendersky. -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Thu Aug 15 02:18:31 2013 From: python-checkins at python.org (eric.snow) Date: Thu, 15 Aug 2013 02:18:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogaXNzdWUgIzE4Njk4?= =?utf-8?q?=3A_ensure_importlib=2Ereload=28=29_returns_the_module_out_of_s?= =?utf-8?q?ys=2Emodules=2E?= Message-ID: <3cFpD751Fwz7LjX@mail.python.org> http://hg.python.org/cpython/rev/e22e7268e58a changeset: 85173:e22e7268e58a branch: 3.3 user: Eric Snow date: Wed Aug 14 18:03:34 2013 -0600 summary: issue #18698: ensure importlib.reload() returns the module out of sys.modules. 
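
The situation this fix addresses can be reproduced outside the test harness with an ordinary source module that swaps a different object into sys.modules as a side effect of being executed, a pattern occasionally used for lazy or wrapped modules. The module name spam here is made up:

    # spam.py -- replaces itself in sys.modules when executed (made-up name)
    import sys, types

    _mod = types.ModuleType(__name__)
    _mod.value = 42
    _mod.__file__ = __file__
    _mod.__loader__ = __loader__    # keep the real loader so reload() works
    sys.modules[__name__] = _mod

    # client code
    import imp, sys
    import spam
    mod = imp.reload(sys.modules['spam'])
    assert mod is sys.modules['spam']   # holds with this change

Previously reload() returned whatever the loader's load_module() happened to return -- here the stale object that spam.py had already replaced -- while sys.modules held the fresh replacement. The same applies to importlib.reload() on the default branch, patched below.
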
files: Lib/imp.py | 4 +++- Lib/test/test_imp.py | 17 +++++++++++++++++ Misc/NEWS | 2 ++ 3 files changed, 22 insertions(+), 1 deletions(-) diff --git a/Lib/imp.py b/Lib/imp.py --- a/Lib/imp.py +++ b/Lib/imp.py @@ -268,7 +268,9 @@ if parent_name and parent_name not in sys.modules: msg = "parent {!r} not in sys.modules" raise ImportError(msg.format(parent_name), name=parent_name) - return module.__loader__.load_module(name) + module.__loader__.load_module(name) + # The module may have replaced itself in sys.modules! + return sys.modules[module.__name__] finally: try: del _RELOADING[name] diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -5,6 +5,7 @@ import shutil import sys from test import support +from test.test_importlib import util import unittest import warnings @@ -285,6 +286,22 @@ with self.assertRaisesRegex(ImportError, 'html'): imp.reload(parser) + def test_module_replaced(self): + # see #18698 + def code(): + module = type(sys)('top_level') + module.spam = 3 + sys.modules['top_level'] = module + mock = util.mock_modules('top_level', + module_code={'top_level': code}) + with mock: + with util.import_state(meta_path=[mock]): + module = importlib.import_module('top_level') + reloaded = imp.reload(module) + actual = sys.modules['top_level'] + self.assertEqual(actual.spam, 3) + self.assertEqual(reloaded.spam, 3) + class PEP3147Tests(unittest.TestCase): """Tests of PEP 3147.""" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -202,6 +202,8 @@ - Issue #17269: Workaround for socket.getaddrinfo crash on MacOS X with port None or "0" and flags AI_NUMERICSERV. +- Issue #18698: Ensure imp.reload() returns the module out of sys.modules. + - Issue #18080: When building a C extension module on OS X, if the compiler is overriden with the CC environment variable, use the new compiler as the default for linking if LDSHARED is not also overriden. This restores -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 02:18:32 2013 From: python-checkins at python.org (eric.snow) Date: Thu, 15 Aug 2013 02:18:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Null_merge_from_3=2E3=2E?= Message-ID: <3cFpD8754kz7LjX@mail.python.org> http://hg.python.org/cpython/rev/4ee0f0ef92fc changeset: 85174:4ee0f0ef92fc parent: 85171:d48faf442569 parent: 85173:e22e7268e58a user: Eric Snow date: Wed Aug 14 18:08:39 2013 -0600 summary: Null merge from 3.3. files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 02:18:34 2013 From: python-checkins at python.org (eric.snow) Date: Thu, 15 Aug 2013 02:18:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Closes_issue_=2318698=3A_e?= =?utf-8?q?nsure_importlib=2Ereload=28=29_returns_the_module_out_of?= Message-ID: <3cFpDB1tDGz7LkJ@mail.python.org> http://hg.python.org/cpython/rev/1af087712e69 changeset: 85175:1af087712e69 user: Eric Snow date: Wed Aug 14 18:11:09 2013 -0600 summary: Closes issue #18698: ensure importlib.reload() returns the module out of sys.modules. 
files: Lib/importlib/__init__.py | 4 +++- Lib/test/test_importlib/test_api.py | 16 ++++++++++++++++ Misc/NEWS | 2 ++ 3 files changed, 21 insertions(+), 1 deletions(-) diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py --- a/Lib/importlib/__init__.py +++ b/Lib/importlib/__init__.py @@ -118,7 +118,9 @@ if parent_name and parent_name not in sys.modules: msg = "parent {!r} not in sys.modules" raise ImportError(msg.format(parent_name), name=parent_name) - return module.__loader__.load_module(name) + module.__loader__.load_module(name) + # The module may have replaced itself in sys.modules! + return sys.modules[module.__name__] finally: try: del _RELOADING[name] diff --git a/Lib/test/test_importlib/test_api.py b/Lib/test/test_importlib/test_api.py --- a/Lib/test/test_importlib/test_api.py +++ b/Lib/test/test_importlib/test_api.py @@ -162,6 +162,22 @@ module = importlib.import_module(mod) importlib.reload(module) + def test_module_replaced(self): + def code(): + import sys + module = type(sys)('top_level') + module.spam = 3 + sys.modules['top_level'] = module + mock = util.mock_modules('top_level', + module_code={'top_level': code}) + with mock: + with util.import_state(meta_path=[mock]): + module = importlib.import_module('top_level') + reloaded = importlib.reload(module) + actual = sys.modules['top_level'] + self.assertEqual(actual.spam, 3) + self.assertEqual(reloaded.spam, 3) + class InvalidateCacheTests(unittest.TestCase): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -203,6 +203,8 @@ - Issue #17867: Raise an ImportError if __import__ is not found in __builtins__. +- Issue #18698: Ensure importlib.reload() returns the module out of sys.modules. + - Issue #17857: Prevent build failures with pre-3.5.0 versions of sqlite3, such as was shipped with Centos 5 and Mac OS X 10.4. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 04:01:39 2013 From: python-checkins at python.org (eric.snow) Date: Thu, 15 Aug 2013 04:01:39 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Add_the_initial_version_of_PE?= =?utf-8?q?P_451=2E?= Message-ID: <3cFrW75ynxz7LjS@mail.python.org> http://hg.python.org/peps/rev/bb01b8da50d8 changeset: 5056:bb01b8da50d8 user: Eric Snow date: Wed Aug 14 19:59:31 2013 -0600 summary: Add the initial version of PEP 451. files: pep-0451.txt | 611 +++++++++++++++++++++++++++++++++++++++ 1 files changed, 611 insertions(+), 0 deletions(-) diff --git a/pep-0451.txt b/pep-0451.txt new file mode 100644 --- /dev/null +++ b/pep-0451.txt @@ -0,0 +1,611 @@ +PEP: 451 +Title: A ModuleSpec Type for the Import System +Version: $Revision$ +Last-Modified: $Date$ +Author: Eric Snow +Discussions-To: import-sig at python.org +Status: Draft +Type: Standards Track +Content-Type: text/x-rst +Created: 8-Aug-2013 +Python-Version: 3.4 +Post-History: 8-Aug-2013 +Resolution: + + +Abstract +======== + +This PEP proposes to add a new class to ``importlib.machinery`` called +``ModuleSpec``. It will contain all the import-related information +about a module without needing to load the module first. Finders will +now return a module's spec rather than a loader. The import system will +use the spec to load the module. + + +Motivation +========== + +The import system has evolved over the lifetime of Python. In late 2002 +PEP 302 introduced standardized import hooks via ``finders`` and +``loaders`` and ``sys.meta_path``. 
The ``importlib`` module, introduced +with Python 3.1, now exposes a pure Python implementation of the APIs +described by PEP 302, as well as of the full import system. It is now +much easier to understand and extend the import system. While a benefit +to the Python community, this greater accessibilty also presents a +challenge. + +As more developers come to understand and customize the import system, +any weaknesses in the finder and loader APIs will be more impactful. So +the sooner we can address any such weaknesses the import system, the +better...and there are a couple we can take care of with this proposal. + +Firstly, any time the import system needs to save information about a +module we end up with more attributes on module objects that are +generally only meaningful to the import system and occasionally to some +people. It would be nice to have a per-module namespace to put future +import-related information. Secondly, there's an API void between +finders and loaders that causes undue complexity when encountered. + +Currently finders are strictly responsible for providing the loader +which the import system will use to load the module. The loader is then +responsible for doing some checks, creating the module object, setting +import-related attributes, "installing" the module to ``sys.modules``, +and loading the module, along with some cleanup. This all takes place +during the import system's call to ``Loader.load_module()``. Loaders +also provide some APIs for accessing data associated with a module. + +Loaders are not required to provide any of the functionality of +``load_module()`` through other methods. Thus, though the import- +related information about a module is likely available without loading +the module, it is not otherwise exposed. + +Furthermore, the requirements assocated with ``load_module()`` are +common to all loaders and mostly are implemented in exactly the same +way. This means every loader has to duplicate the same boilerplate +code. ``importlib.util`` provides some tools that help with this, but +it would be more helpful if the import system simply took charge of +these responsibilities. The trouble is that this would limit the degree +of customization that ``load_module()`` facilitates. This is a gap +between finders and loaders which this proposal aims to fill. + +Finally, when the import system calls a finder's ``find_module()``, the +finder makes use of a variety of information about the module that is +useful outside the context of the method. Currently the options are +limited for persisting that per-module information past the method call, +since it only returns the loader. Popular options for this limitation +are to store the information in a module-to-info mapping somewhere on +the finder itself, or store it on the loader. + +Unfortunately, loaders are not required to be module-specific. On top +of that, some of the useful information finders could provide is +common to all finders, so ideally the import system could take care of +that. This is the same gap as before between finders and loaders. + +As an example of complexity attributable to this flaw, the +implementation of namespace packages in Python 3.3 (see PEP 420) added +``FileFinder.find_loader()`` because there was no good way for +``find_module()`` to provide the namespace path. + +The answer to this gap is a ``ModuleSpec`` object that contains the +per-module information and takes care of the boilerplate functionality +of loading the module. 
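For concreteness, the boilerplate in question, which every loader currently repeats by hand in ``load_module()``, looks roughly like the sketch below. This is a simplified illustration, not actual importlib code; the ``get_code()`` method is assumed to be supplied by a concrete loader::

    import sys
    import types

    class SketchLoader:
        """Illustrative only: the shape of a hand-written load_module()."""

        def load_module(self, name):
            is_reload = name in sys.modules
            # Reuse the module object on reload, otherwise create a fresh one.
            module = sys.modules.get(name)
            if module is None:
                module = types.ModuleType(name)
            # Import-related attributes the import system expects to find.
            module.__loader__ = self
            module.__package__ = name.rpartition('.')[0]   # non-package case
            # "Install" the module before executing it so that circular
            # imports can find it in sys.modules.
            sys.modules[name] = module
            try:
                exec(self.get_code(name), module.__dict__)  # get_code() assumed
            except BaseException:
                if not is_reload:
                    del sys.modules[name]
                raise
            return sys.modules[name]

Under the proposal, everything in that sketch apart from the ``exec`` call would be handled once, by the spec.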
+ +(The idea gained momentum during discussions related to another PEP.[1]) + + +Specification +============= + +The goal is to address the gap between finders and loaders while +changing as little of their semantics as possible. Though some +functionality and information is moved to the new ``ModuleSpec`` type, +their semantics should remain the same. However, for the sake of +clarity, those semantics will be explicitly identified. + +ModuleSpec +---------- + +A new class which defines the import-related values to use when loading +the module. It closely corresponds to the import-related attributes of +module objects. ``ModuleSpec`` objects may also be used by finders and +loaders and other import-related APIs to hold extra import-related +state about the module. This greatly reduces the need to add any new +new import-related attributes to module objects, and loader ``__init__`` +methods won't need to accommodate such per-module state. + +Creating a ModuleSpec: + +``ModuleSpec(name, loader, *, origin=None, filename=None, cached=None, +path=None)`` + +Passed in parameter values are assigned directly to the corresponding +attributes below. Other attributes not listed as parameters (such as +``package``) are read-only properties that are automatically derived +from these values. + +The ``ModuleSpec.from_loader()`` class method allows a suitable +ModuleSpec instance to be easily created from a PEP 302 loader object. + +ModuleSpec Attributes +--------------------- + +Each of the following names is an attribute on ``ModuleSpec`` objects. +A value of ``None`` indicates "not set". This contrasts with module +objects where the attribute simply doesn't exist. + +While ``package`` and ``is_package`` are read-only properties, the +remaining attributes can be replaced after the module spec is created +and after import is complete. This allows for unusual cases where +modifying the spec is the best option. However, typical use should not +involve changing the state of a module's spec. + +Most of the attributes correspond to the import-related attributes of +modules. Here is the mapping, followed by a description of the +attributes. The reverse of this mapping is used by +``init_module_attrs()``. + +============= =========== +On ModuleSpec On Modules +============= =========== +name __name__ +loader __loader__ +package __package__ +is_package - +origin - +filename __file__ +cached __cached__ +path __path__ +============= =========== + +``name`` + +The module's fully resolved and absolute name. It must be set. + +``loader`` + +The loader to use during loading and for module data. These specific +functionalities do not change for loaders. Finders are still +responsible for creating the loader and this attribute is where it is +stored. The loader must be set. + +``package`` + +The name of the module's parent. This is a dynamic attribute with a +value derived from ``name`` and ``is_package``. For packages it is the +value of ``name``. Otherwise it is equivalent to +``name.rpartition('.')[0]``. Consequently, a top-level module will have +the empty string for ``package``. + + +``is_package`` + +Whether or not the module is a package. This dynamic attribute is True +if ``path`` is not None (e.g. the empty list is a "true" value), else it +is false. + +``origin`` + +A string for the location from which the module originates. If +``filename`` is set, ``origin`` should be set to the same value unless +some other value is more appropriate. 
``origin`` is used in +``module_repr()`` if it does not match the value of ``filename``. + +Using ``filename`` for this meaning would be inaccurate, since not all +modules have path-based locations. For instance, built-in modules do +not have ``__file__`` set. Yet it is useful to have a descriptive +string indicating that it originated from the interpreter as a built-in +module. So built-in modules will have ``origin`` set to ``"built-in"``. + +Path-based attributes: + +If any of these is set, it indicates that the module is path-based. For +reference, a path entry is a string for a location where the import +system will look for modules, e.g. the path entries in ``sys.path`` or a +package's ``__path__``). + +``filename`` + +Like ``origin``, but limited to a path-based location. If ``filename`` +is set, ``origin`` should be set to the same string, unless origin is +explicitly set to something else. ``filename`` is not necessarily an +actual file name, but could be any location string based on a path +entry. Regarding the attribute name, while it is potentially +inaccurate, it is both consistent with the equivalent module attribute +and generally accurate. + +.. XXX Would a different name be better? ``path_location``? + +``cached`` + +The path-based location where the compiled code for a module should be +stored. If ``filename`` is set to a source file, this should be set to +corresponding path that PEP 3147 specifies. The +``importlib.util.source_to_cache()`` function facilitates getting the +correct value. + +``path`` + +The list of path entries in which to search for submodules if this +module is a package. Otherwise it is ``None``. + +.. XXX add a path-based subclass? + +ModuleSpec Methods +------------------ + +``from_loader(name, loader, *, is_package=None, origin=None, filename=None, cached=None, path=None)`` + +.. XXX use a different name? + +A factory classmethod that returns a new ``ModuleSpec`` derived from the +arguments. ``is_package`` is used inside the method to indicate that +the module is a package. If not explicitly passed in, it is set to +``True`` if ``path`` is passed in. It falls back to using the result of +the loader's ``is_package()``, if available. Finally it defaults to +False. The remaining parameters have the same meaning as the +corresponding ``ModuleSpec`` attributes. + +In contrast to ``ModuleSpec.__init__()``, which takes the arguments +as-is, ``from_loader()`` calculates missing values from the ones passed +in, as much as possible. This replaces the behavior that is currently +provided by several ``importlib.util`` functions as well as the optional +``init_module_attrs()`` method of loaders. Just to be clear, here is a +more detailed description of those calculations:: + + If not passed in, ``filename`` is to the result of calling the + loader's ``get_filename()``, if available. Otherwise it stays + unset (``None``). + + If not passed in, ``path`` is set to an empty list if + ``is_package`` is true. Then the directory from ``filename`` is + appended to it, if possible. If ``is_package`` is false, ``path`` + stays unset. + + If ``cached`` is not passed in and ``filename`` is passed in, + ``cached`` is derived from it. For filenames with a source suffix, + it set to the result of calling + ``importlib.util.cache_from_source()``. For bytecode suffixes (e.g. + ``.pyc``), ``cached`` is set to the value of ``filename``. If + ``filename`` is not passed in or ``cache_from_source()`` raises + ``NotImplementedError``, ``cached`` stays unset. 
+ + If not passed in, ``origin`` is set to ``filename``. Thus if + ``filename`` is unset, ``origin`` stays unset. + +``module_repr()`` + +Returns a repr string for the module if ``origin`` is set and +``filename`` is not set. The string refers to the value of ``origin``. +Otherwise ``module_repr()`` returns None. This indicates to the module +type's ``__repr__()`` that it should fall back to the default repr. + +We could also have ``module_repr()`` produce the repr for the case where +``filename`` is set or where ``origin`` is not set, mirroring the repr +that the module type produces directly. However, the repr string is +derived from the import-related module attributes, which might be out of +sync with the spec. + +.. XXX Is using the spec close enough? Probably not. + +The implementation of the module type's ``__repr__()`` will change to +accommodate this PEP. However, the current functionality will remain to +handle the case where a module does not have a ``__spec__`` attribute. + +.. XXX Clarify the above justification. + +``init_module_attrs(module)`` + +Sets the module's import-related attributes to the corresponding values +in the module spec. If a path-based attribute is not set on the spec, +it is not set on the module. For the rest, a ``None`` value on the spec +(aka "not set") means ``None`` will be set on the module. If any of the +attributes are already set on the module, the existing values are +replaced. The module's own ``__spec__`` is not consulted but does get +replaced with the spec on which ``init_module_attrs()`` was called. +The earlier mapping of ``ModuleSpec`` attributes to module attributes +indicates which attributes are involved on both sides. + +``load(module=None, *, is_reload=False)`` + +This method captures the current functionality of and requirements on +``Loader.load_module()`` without any semantic changes, except one. +Reloading a module when ``exec_module()`` is available actually uses +``module`` rather than ignoring it in favor of the one in +``sys.modules``, as ``Loader.load_module()`` does. + +``module`` is only allowed when ``is_reload`` is true. This means that +``is_reload`` could be dropped as a parameter. However, doing so would +mean we could not use ``None`` to indicate that the module should be +pulled from ``sys.modules``. Furthermore, ``is_reload`` makes the +intent of the call clear. + +There are two parts to what happens in ``load()``. First, the module is +prepared, loaded, updated appropriately, and left available for the +second part. This is described in more detail shortly. + +Second, in the case of error during a normal load (not reload) the +module is removed from ``sys.modules``. If no error happened, the +module is pulled from ``sys.modules``. This the module returned by +``load()``. Before it is returned, if it is a different object than the +one produced by the first part, attributes of the module from +``sys.modules`` are updated to reflect the spec. + +Returning the module from ``sys.modules`` accommodates the ability of +the module to replace itself there while it is executing (during load). + +As already noted, this is what already happens in the import system. +``load()`` is not meant to change any of this behavior. + +Regarding the first part of ``load()``, the following describes what +happens. It depends on if ``is_reload`` is true and if the loader has +``exec_module()``. 
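Read as code, the behaviour described here and in the four cases that follow comes out roughly as below. This is an illustrative sketch only: the function name, the direct use of ``types.ModuleType``, and the omission of the import lock and of some attribute fix-ups are simplifications, not the PEP's reference implementation::

    import sys
    import types

    def load_from_spec(spec, module=None, *, is_reload=False):
        # Sketch of ModuleSpec.load(); locking and some of the attribute
        # updates described in the text are omitted for brevity.
        if spec.loader is None:
            raise ValueError('spec has no loader set')
        if module is not None and not is_reload:
            raise ValueError('a module may only be passed in when reloading')
        if not hasattr(spec.loader, 'exec_module'):
            # Old-style loader: defer to load_module() for load and reload.
            spec.loader.load_module(spec.name)
        elif is_reload:
            if module is None:
                module = sys.modules.get(spec.name)
            if module is None:
                raise ImportError('module {!r} is not loaded'.format(spec.name))
            spec.loader.exec_module(module)
        else:
            # Normal load: create, initialize, register, then execute.
            module = types.ModuleType(spec.name)
            spec.init_module_attrs(module)
            sys.modules[spec.name] = module
            try:
                spec.loader.exec_module(module)
            except BaseException:
                del sys.modules[spec.name]
                raise
        # Second part: the module that counts is whatever is now registered
        # in sys.modules, so a module that replaced itself there wins.
        module = sys.modules[spec.name]
        spec.init_module_attrs(module)
        return module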
+ +For normal load with ``exec_module()`` available:: + + A new module is created, ``init_module_attrs()`` is called to set + its attributes, and it is set on sys.modules. At that point + the loader's ``exec_module()`` is called, after which the module + is ready for the second part of loading. + +.. XXX What if the module already exists in sys.modules? + +For normal load without ``exec_module()`` available:: + + The loader's ``load_module()`` is called and the attributes of the + module it returns are updated to match the spec. + +For reload with ``exec_module()`` available:: + + If ``module`` is ``None``, it is pulled from ``sys.modules``. If + still ``None``, ImportError is raised. Otherwise ``exec_module()`` + is called, passing in the module-to-be-reloaded. + +For reload without ``exec_module()`` available:: + + The loader's ``load_module()`` is called and the attributes of the + module it returns are updated to match the spec. + +There is some boilerplate involved when ``exec_module()`` is available, +but only the boilerplate that the import system uses currently. + +If ``loader`` is not set (``None``), ``load()`` raises a ValueError. If +``module`` is passed in but ``is_reload`` is false, a ValueError is also +raises to indicate that ``load()`` was called incorrectly. There may be +use cases for calling ``load()`` in that way, but they are outside the +scope of this PEP + +.. XXX add reload(module=None) and drop load()'s parameters entirely? +.. XXX add more of importlib.reload()'s boilerplate to load()/reload()? + +Omitted Attributes and Methods +------------------------------ + +``ModuleSpec`` does not have a ``from_module()`` factory method since +all modules should already have a spec. + +Additionally, there is no ``PathModuleSpec`` subclass of ``ModuleSpec`` +that provides the ``filename``, ``cached``, and ``path`` functionality. +While that might make the separation cleaner, module objects don't have +that distinction. ``ModuleSpec`` will support both cases equally well. + +Backward Compatibility +---------------------- + +Since ``Finder.find_module()`` methods would now return a module spec +instead of loader, specs must act like the loader that would have been +returned instead. This is relatively simple to solve since the loader +is available as an attribute of the spec. We will use ``__getattr__()`` +to do it. + +However, ``ModuleSpec.is_package`` (an attribute) conflicts with +``InspectLoader.is_package()`` (a method). Working around this requires +a more complicated solution but is not a large obstacle. Simply making +``ModuleSpec.is_package`` a method does not reflect that is a relatively +static piece of data. ``module_repr()`` also conflicts with the same +method on loaders, but that workaround is not complicated since both are +methods. + +Unfortunately, the ability to proxy does not extend to ``id()`` +comparisons and ``isinstance()`` tests. In the case of the return value +of ``find_module()``, we accept that break in backward compatibility. +However, we will mitigate the problem with ``isinstance()`` somewhat by +registering ``ModuleSpec`` on the loaders in ``importlib.abc``. + +Subclassing +----------- + +Subclasses of ModuleSpec are allowed, but should not be necessary. +Adding functionality to a custom finder or loader will likely be a +better fit and should be tried first. However, as long as a subclass +still fulfills the requirements of the import system, objects of that +type are completely fine as the return value of ``find_module()``. 
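The ``__getattr__()`` proxying mentioned under Backward Compatibility above amounts to plain attribute delegation. A toy sketch, not the planned implementation::

    class _LoaderProxy:
        # Anything not found on the spec itself falls through to the loader,
        # so code that still expects find_module() to return a loader keeps
        # working when it is handed a ModuleSpec instead.
        def __getattr__(self, name):
            return getattr(self.loader, name)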
+ +Module Objects +-------------- + +Module objects will now have a ``__spec__`` attribute to which the +module's spec will be bound. None of the other import-related module +attributes will be changed or deprecated, though some of them could be; +any such deprecation can wait until Python 4. + +``ModuleSpec`` objects will not be kept in sync with the corresponding +module object's import-related attributes. Though they may differ, in +practice they will typically be the same. + +One notable exception is that case where a module is run as a script by +using the ``-m`` flag. In that case ``module.__spec__.name`` will +reflect the actual module name while ``module.__name__`` will be +``__main__``. + +The ``__file__`` attribute will be set where applicable in the same way +it is now. For instance, zip imports will still have it set for +backward-compatibility reasons. However, the recommendation will be to +have ``__file__`` set only for actual filenames from now on. + +Finders +------- + +Finders will now return ModuleSpec objects when ``find_module()`` is +called rather than loaders. For backward compatility, ``Modulespec`` +objects proxy the attributes of their ``loader`` attribute. + +Adding another similar method to avoid backward-compatibility issues +is undersireable if avoidable. The import APIs have suffered enough, +especially considering ``PathEntryFinder.find_loader()`` was just +added in Python 3.3. The approach taken by this PEP should be +sufficient to address backward-compatibility issues for +``find_module()``. + +The change to ``find_module()`` applies to both ``MetaPathFinder`` and +``PathEntryFinder``. ``PathEntryFinder.find_loader()`` will be +deprecated and, for backward compatibility, implicitly special-cased if +the method exists on a finder. + +Finders are still responsible for creating the loader. That loader will +now be stored in the module spec returned by ``find_module()`` rather +than returned directly. As is currently the case without the PEP, if a +loader would be costly to create, that loader can be designed to defer +the cost until later. + +Loaders +------- + +Loaders will have a new method, ``exec_module(module)``. Its only job +is to "exec" the module and consequently populate the module's +namespace. It is not responsible for creating or preparing the module +object, nor for any cleanup afterward. It has no return value. + +The ``load_module()`` of loaders will still work and be an active part +of the loader API. It is still useful for cases where the default +module creation/prepartion/cleanup is not appropriate for the loader. + +For example, the C API for extension modules only supports the full +control of ``load_module()``. As such, ``ExtensionFileLoader`` will not +implement ``exec_module()``. In the future it may be appropriate to +produce a second C API that would support an ``exec_module()`` +implementation for ``ExtensionFileLoader``. Such a change is outside +the scope of this PEP. + +A loader must define either ``exec_module()`` or ``load_module()``. If +both exist on the loader, ``ModuleSpec.load()`` uses ``exec_module()`` +and ignores ``load_module()``. + +PEP 420 introduced the optional ``module_repr()`` loader method to limit +the amount of special-casing in the module type's ``__repr__()``. Since +this method is part of ``ModuleSpec``, it will be deprecated on loaders. +However, if it exists on a loader it will be used exclusively. 
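To make the split described in this section concrete, a loader that only implements the new ``exec_module()`` hook could be as small as the following sketch. It is illustrative only; the ``get_code`` callable passed in is an assumption of the example, not part of the proposal::

    import types

    class MinimalLoader:
        """Illustrative sketch of a loader providing only exec_module()."""

        def __init__(self, name, get_code):
            self._name = name
            self._get_code = get_code      # callable returning a code object

        def exec_module(self, module):
            # The loader's whole job under the proposal: populate the
            # namespace of the module that ModuleSpec.load() has prepared.
            exec(self._get_code(self._name), module.__dict__)

    # Example use, with a literal code string standing in for module source:
    loader = MinimalLoader('demo',
                           lambda name: compile('answer = 42', '<demo>', 'exec'))
    mod = types.ModuleType('demo')
    loader.exec_module(mod)
    assert mod.answer == 42

Everything else that a hand-written ``load_module()`` has to do today (creating the module, setting the import-related attributes, registering it in ``sys.modules``, cleaning up on failure) is handled by ``ModuleSpec.load()``.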
+ +``Loader.init_module_attr()`` method, added prior to Python 3.4's +release , will be removed in favor of the same method on ``ModuleSpec``. + +However, ``InspectLoader.is_package()`` will not be deprecated even +though the same information is found on ``ModuleSpec``. ``ModuleSpec`` +can use it to populate its own ``is_package`` if that information is +not otherwise available. Still, it will be made optional. + +The path-based loaders in ``importlib`` take arguments in their +``__init__()`` and have corresponding attributes. However, the need for +those values is eliminated. The only exception is +``FileLoader.get_filename()``, which uses ``self.path``. The signatures +for these loaders and the accompanying attributes will be deprecated. + +In addition to executing a module during loading, loaders will still be +directly responsible for providing APIs concerning module-related data. + +Other Changes +------------- + +* The various finders and loaders provided by ``importlib`` will be + updated to comply with this proposal. + +* The spec for the ``__main__`` module will reflect how the interpreter + was started. For instance, with ``-m`` the spec's name will be that + of the run module, while ``__main__.__name__`` will still be + "__main__". + +* We add ``importlib.find_module()`` to mirror + ``importlib.find_loader()`` (which becomes deprecated). + +* Deprecations in ``importlib.util``: ``set_package()``, + ``set_loader()``, and ``module_for_loader()``. ``module_to_load()`` + (introduced prior to Python 3.4's release) can be removed. + +* ``importlib.reload()`` is changed to use ``ModuleSpec.load()``. + +* ``ModuleSpec.load()`` and ``importlib.reload()`` will now make use of + the per-module import lock, whereas ``Loader.load_module()`` did not. + +Reference Implementation +------------------------ + +A reference implementation is available at . + + +Open Questions +============== + +* How to avoid having custom ModuleSpec attributes conflict with future + normal attributes? + +This could be done with a sub-namespace bound to a single ModuleSpec +attribute. It could also be done by reserving names with a single +leading underscore for custom attributes. Or we could just not worry +about it. + +* Get rid of the ``is_package`` property? + +It duplicates information +both in the ``ModuleSpec()`` signature and in attributes. It is +technically unncessary in light of the path attribute and it conflicts +with ``InspectLoader.is_package()``, which makes the implementation more +complicated. However, it also provides an explicit indicator of +package-ness, which helps those less familiar with the import system. + +* Deprecate the use of ``__file__`` for anything except actual files? + +* Introduce a new extension module API that takes advantage of + ``ModuleSpec``? I'd rather that be part of a separate proposal. + +* Add ``create_module()`` to loaders? + +It would take a ``ModuleSpec`` +and return the module that should be passed to ``spec.exec()``. This +method would be helpful for new extension module import APIs. + +* Have ``ModuleSpec.module_repr()`` replace more of the module type's + ``__repr__()`` implementation? + +A compliant module is required to have +``__spec__`` set so that should work. However, currently the repr uses +the module attributes. Using the spec attributes would give precedence +to the spec in the case that they differ, which would be +backward-incompatible. + +* Factor the path-based attributes/functionality into a subclass-- + something like ``PathModuleSpec``? 
+ +It looks like there just isn't enough benefit to doing so. + + +References +========== + +[1] http://mail.python.org/pipermail/import-sig/2013-August/000658.html + + +Copyright +========= + +This document has been placed in the public domain. + + +.. + Local Variables: + mode: indented-text + indent-tabs-mode: nil + sentence-end-double-space: t + fill-column: 70 + coding: utf-8 + End: + -- Repository URL: http://hg.python.org/peps From solipsis at pitrou.net Thu Aug 15 05:55:41 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Thu, 15 Aug 2013 05:55:41 +0200 Subject: [Python-checkins] Daily reference leaks (1af087712e69): sum=0 Message-ID: results for 1af087712e69 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogeVVjBp', '-x'] From python-checkins at python.org Thu Aug 15 11:19:15 2013 From: python-checkins at python.org (raymond.hettinger) Date: Thu, 15 Aug 2013 11:19:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Hoist_the_global_=22dummy?= =?utf-8?q?=22_lookup_outside_of_the_reinsertion_loop=2E?= Message-ID: <3cG2D31Z87zRhQ@mail.python.org> http://hg.python.org/cpython/rev/9c019475bb2f changeset: 85176:9c019475bb2f user: Raymond Hettinger date: Thu Aug 15 02:18:55 2013 -0700 summary: Hoist the global "dummy" lookup outside of the reinsertion loop. files: Objects/setobject.c | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -280,6 +280,7 @@ Py_ssize_t i; int is_oldtable_malloced; setentry small_copy[PySet_MINSIZE]; + PyObject *dummy_entry; assert(minused >= 0); @@ -336,11 +337,12 @@ /* Copy the data over; this is refcount-neutral for active entries; dummy entries aren't copied over, of course */ + dummy_entry = dummy; for (entry = oldtable; i > 0; entry++) { if (entry->key == NULL) { /* UNUSED */ ; - } else if (entry->key == dummy) { + } else if (entry->key == dummy_entry) { /* DUMMY */ --i; assert(entry->key == dummy); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 11:40:42 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 15 Aug 2013 11:40:42 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_it=27s_not_easy_to?= =?utf-8?q?_make_listening_sockets_non-inheritable_on_Windows?= Message-ID: <3cG2hp2pv9z7Lk6@mail.python.org> http://hg.python.org/peps/rev/94989edaf61a changeset: 5057:94989edaf61a user: Victor Stinner date: Wed Aug 14 13:20:11 2013 +0200 summary: PEP 446: it's not easy to make listening sockets non-inheritable on Windows files: pep-0446.txt | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -85,6 +85,8 @@ `_ * `Q315939: PRB: Child Inherits Unintended Handles During CreateProcess Call `_ +* `Stackoverflow: Can TCP SOCKET handles be set not inheritable? 
+ `_ Inheritance of File Descriptors on UNIX -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 15 11:40:43 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 15 Aug 2013 11:40:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_rephrase_the_Secur?= =?utf-8?q?ity_Vulnerability_section?= Message-ID: <3cG2hq4S82z7Lkn@mail.python.org> http://hg.python.org/peps/rev/08f79790ec9b changeset: 5058:08f79790ec9b user: Victor Stinner date: Thu Aug 15 11:38:10 2013 +0200 summary: PEP 446: rephrase the Security Vulnerability section files: pep-0446.txt | 22 ++++++++++------------ 1 files changed, 10 insertions(+), 12 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -170,18 +170,11 @@ Security Vulnerability ---------------------- -Leaking file descriptors is also a well known security vulnerability: -read -`FIO42-C. Ensure files are properly closed when they are no longer -needed -`_ -of the CERT. - -An untrusted child process can read sensitive data like passwords and -take control of the parent process though leaked file descriptors. It is -for example a way to escape from a chroot. With a leaked listening -socket, a child process can accept new connections to read sensitive -data. +Leaking sensitive file handles and file descriptors can lead to security +vulnerabilities. An untrusted child process can read sensitive data like +passwords and take control of the parent process though leaked file +descriptors. With a leaked listening socket, a child process can accept +new connections to read sensitive data. Example of vulnerabilities: @@ -199,6 +192,11 @@ `_ (2011) +Read also the CERT Secure Coding Standards: +`FIO42-C. Ensure files are properly closed when they are no longer +needed +`_. + Issues fixed in the subprocess module ------------------------------------- -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 15 11:54:43 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 15 Aug 2013 11:54:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_add_a_link_explain?= =?utf-8?q?ing_why_leaked_file_descriptors_cause_issue_on?= Message-ID: <3cG30z45cYz7Ljx@mail.python.org> http://hg.python.org/peps/rev/b37eb6087966 changeset: 5059:b37eb6087966 user: Victor Stinner date: Thu Aug 15 11:54:36 2013 +0200 summary: PEP 446: add a link explaining why leaked file descriptors cause issue on SELinux files: pep-0446.txt | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -572,6 +572,9 @@ history of ``O_CLOEXEC`` and ``O_NONBLOCK`` flags * `File descriptor handling changes in 2.6.27 `_ +* `Excuse me son, but your code is leaking !!! 
+ `_ (Dan Walsh, + March 2012): SELinux issues with leaked file descriptors Copyright -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 15 11:58:05 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 15 Aug 2013 11:58:05 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4Mjk2?= =?utf-8?q?=3A_Try_to_fix_TestSendfile=2Etest=5Ftrailers=28=29_of_test=5Fo?= =?utf-8?q?s_on_FreeBSD?= Message-ID: <3cG34s3vf5zRkh@mail.python.org> http://hg.python.org/cpython/rev/924d327da3af changeset: 85177:924d327da3af branch: 3.3 parent: 85173:e22e7268e58a user: Victor Stinner date: Thu Aug 15 11:57:02 2013 +0200 summary: Issue #18296: Try to fix TestSendfile.test_trailers() of test_os on FreeBSD files: Lib/test/test_os.py | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -1950,16 +1950,17 @@ def test_trailers(self): TESTFN2 = support.TESTFN + "2" + file_data = b"abcdef" with open(TESTFN2, 'wb') as f: - f.write(b"abcde") + f.write(file_data) with open(TESTFN2, 'rb')as f: self.addCleanup(os.remove, TESTFN2) - os.sendfile(self.sockno, f.fileno(), 0, 4096, - trailers=[b"12345"]) + os.sendfile(self.sockno, f.fileno(), 0, len(file_data), + trailers=[b"1234"]) self.client.close() self.server.wait() data = self.server.handler_instance.get_data() - self.assertEqual(data, b"abcde12345") + self.assertEqual(data, b"abcdef1234") if hasattr(os, "SF_NODISKIO"): def test_flags(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 11:58:06 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 15 Aug 2013 11:58:06 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_=28Merge_3=2E3=29_Issue_=2318296=3A_Try_to_fix_TestSendf?= =?utf-8?q?ile=2Etest=5Ftrailers=28=29_of_test=5Fos_on?= Message-ID: <3cG34t5pTxz7Ljp@mail.python.org> http://hg.python.org/cpython/rev/92039fb68483 changeset: 85178:92039fb68483 parent: 85176:9c019475bb2f parent: 85177:924d327da3af user: Victor Stinner date: Thu Aug 15 11:57:19 2013 +0200 summary: (Merge 3.3) Issue #18296: Try to fix TestSendfile.test_trailers() of test_os on FreeBSD files: Lib/test/test_os.py | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -1963,16 +1963,17 @@ def test_trailers(self): TESTFN2 = support.TESTFN + "2" + file_data = b"abcdef" with open(TESTFN2, 'wb') as f: - f.write(b"abcde") + f.write(file_data) with open(TESTFN2, 'rb')as f: self.addCleanup(os.remove, TESTFN2) - os.sendfile(self.sockno, f.fileno(), 0, 4096, - trailers=[b"12345"]) + os.sendfile(self.sockno, f.fileno(), 0, len(file_data), + trailers=[b"1234"]) self.client.close() self.server.wait() data = self.server.handler_instance.get_data() - self.assertEqual(data, b"abcde12345") + self.assertEqual(data, b"abcdef1234") if hasattr(os, "SF_NODISKIO"): def test_flags(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 18:32:14 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 15 Aug 2013 18:32:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Change_the_builtin_hash_al?= =?utf-8?q?gorithms=27_names_to_lower_case_names?= Message-ID: <3cGCqf5ZV3z7Lk4@mail.python.org> 
http://hg.python.org/cpython/rev/9a4949f5d15c changeset: 85179:9a4949f5d15c user: Christian Heimes date: Thu Aug 15 18:31:48 2013 +0200 summary: Change the builtin hash algorithms' names to lower case names as promised by hashlib's documentation. files: Lib/test/test_hashlib.py | 4 ++-- Misc/NEWS | 3 +++ Modules/md5module.c | 2 +- Modules/sha1module.c | 2 +- Modules/sha256module.c | 4 ++-- Modules/sha512module.c | 4 ++-- 6 files changed, 11 insertions(+), 8 deletions(-) diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -233,9 +233,9 @@ self.assertEqual(m.block_size, block_size) self.assertEqual(m.digest_size, digest_size) self.assertEqual(len(m.digest()), digest_size) - self.assertEqual(m.name.lower(), name.lower()) + self.assertEqual(m.name, name) # split for sha3_512 / _sha3.sha3 object - self.assertIn(name.split("_")[0], repr(m).lower()) + self.assertIn(name.split("_")[0], repr(m)) def test_blocksize_name(self): self.check_blocksize_name('md5', 64, 16) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,9 @@ Library ------- +- Issue #18532: Change the builtin hash algorithms' names to lower case names + as promised by hashlib's documentation. + - Issue #18405: Improve the entropy of crypt.mksalt(). - Issue #12015: The tempfile module now uses a suffix of 8 random characters diff --git a/Modules/md5module.c b/Modules/md5module.c --- a/Modules/md5module.c +++ b/Modules/md5module.c @@ -439,7 +439,7 @@ static PyObject * MD5_get_name(PyObject *self, void *closure) { - return PyUnicode_FromStringAndSize("MD5", 3); + return PyUnicode_FromStringAndSize("md5", 3); } static PyObject * diff --git a/Modules/sha1module.c b/Modules/sha1module.c --- a/Modules/sha1module.c +++ b/Modules/sha1module.c @@ -411,7 +411,7 @@ static PyObject * SHA1_get_name(PyObject *self, void *closure) { - return PyUnicode_FromStringAndSize("SHA1", 4); + return PyUnicode_FromStringAndSize("sha1", 4); } static PyObject * diff --git a/Modules/sha256module.c b/Modules/sha256module.c --- a/Modules/sha256module.c +++ b/Modules/sha256module.c @@ -501,9 +501,9 @@ SHA256_get_name(PyObject *self, void *closure) { if (((SHAobject *)self)->digestsize == 32) - return PyUnicode_FromStringAndSize("SHA256", 6); + return PyUnicode_FromStringAndSize("sha256", 6); else - return PyUnicode_FromStringAndSize("SHA224", 6); + return PyUnicode_FromStringAndSize("sha224", 6); } static PyGetSetDef SHA_getseters[] = { diff --git a/Modules/sha512module.c b/Modules/sha512module.c --- a/Modules/sha512module.c +++ b/Modules/sha512module.c @@ -567,9 +567,9 @@ SHA512_get_name(PyObject *self, void *closure) { if (((SHAobject *)self)->digestsize == 64) - return PyUnicode_FromStringAndSize("SHA512", 6); + return PyUnicode_FromStringAndSize("sha512", 6); else - return PyUnicode_FromStringAndSize("SHA384", 6); + return PyUnicode_FromStringAndSize("sha384", 6); } static PyGetSetDef SHA_getseters[] = { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 20:15:27 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 15 Aug 2013 20:15:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Replace_an_overly_optimist?= =?utf-8?q?ic_assert=28=29_in_=5FPyGC=5FCollectNoFail_with_a_simple?= Message-ID: <3cGG6l3StFz7Lk5@mail.python.org> http://hg.python.org/cpython/rev/351657165a05 changeset: 85180:351657165a05 user: Antoine Pitrou date: Thu Aug 15 20:15:15 2013 +0200 summary: Replace an overly 
optimistic assert() in _PyGC_CollectNoFail with a simple guard. files: Modules/gcmodule.c | 19 +++++++++++++------ 1 files changed, 13 insertions(+), 6 deletions(-) diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1612,12 +1612,19 @@ { Py_ssize_t n; - /* This function should only be called on interpreter shutdown, and - therefore not recursively. */ - assert(!collecting); - collecting = 1; - n = collect(NUM_GENERATIONS - 1, NULL, NULL, 1); - collecting = 0; + /* Ideally, this function is only called on interpreter shutdown, + and therefore not recursively. Unfortunately, when there are daemon + threads, a daemon thread can start a cyclic garbage collection + during interpreter shutdown (and then never finish it). + See http://bugs.python.org/issue8713#msg195178 for an example. + */ + if (collecting) + n = 0; + else { + collecting = 1; + n = collect(NUM_GENERATIONS - 1, NULL, NULL, 1); + collecting = 0; + } return n; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 20:33:09 2013 From: python-checkins at python.org (terry.reedy) Date: Thu, 15 Aug 2013 20:33:09 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NDI1?= =?utf-8?q?=3A_Unittests_for_idlelib=2EIdleHistory=2E_First_patch_by_R=2E_?= =?utf-8?q?Jayakrishnan=2E?= Message-ID: <3cGGW96mBPzSQt@mail.python.org> http://hg.python.org/cpython/rev/0e9d41edb2e4 changeset: 85181:0e9d41edb2e4 branch: 2.7 parent: 85170:5063dab96843 user: Terry Jan Reedy date: Thu Aug 15 14:31:55 2013 -0400 summary: Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. files: Lib/idlelib/IdleHistory.py | 18 +- Lib/idlelib/idle_test/test_idlehistory.py | 164 ++++++++++ 2 files changed, 179 insertions(+), 3 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -49,7 +49,13 @@ self.text.insert(where, output) def fetch(self, reverse): - "Fetch statememt and enter into text at cursor." + '''Fetch statememt and replace current line in text widget. + + Set prefix and pointer as needed for successive fetches. + Reset them to None, None when returning to the start line. + Sound bell when return to start line or cannot leave a line + because cyclic is False. 
+ ''' nhist = len(self.history) pointer = self.pointer prefix = self.prefix @@ -64,7 +70,7 @@ else: if self.cyclic: pointer = -1 # will be incremented - else: + else: # abort history_next self.text.bell() return nprefix = len(prefix) @@ -75,7 +81,7 @@ pointer = pointer + 1 if pointer < 0 or pointer >= nhist: self.text.bell() - if not self.cyclic and pointer < 0: + if not self.cyclic and pointer < 0: # abort history_prev return else: if self._get_source("iomark", "end-1c") != prefix: @@ -106,3 +112,9 @@ self.history.append(source) self.pointer = None self.prefix = None + +if __name__ == "__main__": + from test import support + support.use_resources = ['gui'] + from unittest import main + main('idlelib.idle_test.test_idlehistory', verbosity=2, exit=False) diff --git a/Lib/idlelib/idle_test/test_idlehistory.py b/Lib/idlelib/idle_test/test_idlehistory.py new file mode 100644 --- /dev/null +++ b/Lib/idlelib/idle_test/test_idlehistory.py @@ -0,0 +1,164 @@ +import unittest +from test.test_support import requires + +import Tkinter as tk +from Tkinter import Text as tkText +from idlelib.idle_test.mock_tk import Text as mkText +from idlelib.IdleHistory import History +from idlelib.configHandler import idleConf + +line1 = 'a = 7' +line2 = 'b = a' + +class StoreTest(unittest.TestCase): + '''Tests History.__init__ and History.store with mock Text''' + + @classmethod + def setUpClass(cls): + cls.text = mkText() + cls.history = History(cls.text) + + def tearDown(self): + self.text.delete('1.0', 'end') + self.history.history = [] + + def test_init(self): + self.assertIs(self.history.text, self.text) + self.assertEqual(self.history.history, []) + self.assertIsNone(self.history.prefix) + self.assertIsNone(self.history.pointer) + self.assertEqual(self.history.cyclic, + idleConf.GetOption("main", "History", "cyclic", 1, "bool")) + + def test_store_short(self): + self.history.store('a') + self.assertEqual(self.history.history, []) + self.history.store(' a ') + self.assertEqual(self.history.history, []) + + def test_store_dup(self): + self.history.store(line1) + self.assertEqual(self.history.history, [line1]) + self.history.store(line2) + self.assertEqual(self.history.history, [line1, line2]) + self.history.store(line1) + self.assertEqual(self.history.history, [line2, line1]) + + def test_store_reset(self): + self.history.prefix = line1 + self.history.pointer = 0 + self.history.store(line2) + self.assertIsNone(self.history.prefix) + self.assertIsNone(self.history.pointer) + + +class TextWrapper: + def __init__(self, master): + self.text = tkText(master=master) + self._bell = False + def __getattr__(self, name): + return getattr(self.text, name) + def bell(self): + self._bell = True + +class FetchTest(unittest.TestCase): + '''Test History.fetch with wrapped tk.Text. + ''' + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = tk.Tk() + + def setUp(self): + self.text = text = TextWrapper(self.root) + text.insert('1.0', ">>> ") + text.mark_set('iomark', '1.4') + text.mark_gravity('iomark', 'left') + self.history = History(text) + self.history.history = [line1, line2] + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + + def fetch_test(self, reverse, line, prefix, index, bell=False): + # Perform one fetch as invoked by Alt-N or Alt-P + # Test the result. The line test is the most important. + # The last two are diagnostic of fetch internals. 
+ History = self.history + History.fetch(reverse) + Equal = self.assertEqual + Equal(self.text.get('iomark', 'end-1c'), line) + Equal(self.text._bell, bell) + if bell: + self.text._bell = False + Equal(History.prefix, prefix) + Equal(History.pointer, index) + + def test_fetch_prev_cyclic(self): + prefix = '' + test = self.fetch_test + test(True, line2, prefix, 1) + test(True, line1, prefix, 0) + test(True, prefix, None, None, bell=True) + + def test_fetch_next_cyclic(self): + prefix = '' + test = self.fetch_test + test(False, line1, prefix, 0) + test(False, line2, prefix, 1) + test(False, prefix, None, None, bell=True) + + # Prefix 'a' tests skip line2, which starts with 'b' + def test_fetch_prev_prefix(self): + prefix = 'a' + self.text.insert('iomark', prefix) + self.fetch_test(True, line1, prefix, 0) + self.fetch_test(True, prefix, None, None, bell=True) + + def test_fetch_next_prefix(self): + prefix = 'a' + self.text.insert('iomark', prefix) + self.fetch_test(False, line1, prefix, 0) + self.fetch_test(False, prefix, None, None, bell=True) + + def test_fetch_prev_noncyclic(self): + prefix = '' + self.history.cyclic = False + test = self.fetch_test + test(True, line2, prefix, 1) + test(True, line1, prefix, 0) + test(True, line1, prefix, 0, bell=True) + + def test_fetch_next_noncyclic(self): + prefix = '' + self.history.cyclic = False + test = self.fetch_test + test(False, prefix, None, None, bell=True) + test(True, line2, prefix, 1) + test(False, prefix, None, None, bell=True) + test(False, prefix, None, None, bell=True) + + def test_fetch_cursor_move(self): + # Move cursor after fetch + self.history.fetch(reverse=True) # initialization + self.text.mark_set('insert', 'iomark') + self.fetch_test(True, line2, None, None, bell=True) + + def test_fetch_edit(self): + # Edit after fetch + self.history.fetch(reverse=True) # initialization + self.text.delete('iomark', 'insert', ) + self.text.insert('iomark', 'a =') + self.fetch_test(True, line1, 'a =', 0) # prefix is reset + + def test_history_prev_next(self): + # Minimally test functions bound to events + self.history.history_prev('dummy event') + self.assertEqual(self.history.pointer, 1) + self.history.history_next('dummy event') + self.assertEqual(self.history.pointer, None) + + +if __name__ == '__main__': + unittest.main(verbosity=2, exit=2) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 20:33:11 2013 From: python-checkins at python.org (terry.reedy) Date: Thu, 15 Aug 2013 20:33:11 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NDI1?= =?utf-8?q?=3A_Unittests_for_idlelib=2EIdleHistory=2E_First_patch_by_R=2E_?= =?utf-8?q?Jayakrishnan=2E?= Message-ID: <3cGGWC2gPlz7Ll5@mail.python.org> http://hg.python.org/cpython/rev/c4cac5d73e9d changeset: 85182:c4cac5d73e9d branch: 3.3 parent: 85177:924d327da3af user: Terry Jan Reedy date: Thu Aug 15 14:32:04 2013 -0400 summary: Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. files: Lib/idlelib/IdleHistory.py | 18 +- Lib/idlelib/idle_test/test_idlehistory.py | 164 ++++++++++ 2 files changed, 179 insertions(+), 3 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -49,7 +49,13 @@ self.text.insert(where, output) def fetch(self, reverse): - "Fetch statememt and enter into text at cursor." + '''Fetch statememt and replace current line in text widget. 
+ + Set prefix and pointer as needed for successive fetches. + Reset them to None, None when returning to the start line. + Sound bell when return to start line or cannot leave a line + because cyclic is False. + ''' nhist = len(self.history) pointer = self.pointer prefix = self.prefix @@ -64,7 +70,7 @@ else: if self.cyclic: pointer = -1 # will be incremented - else: + else: # abort history_next self.text.bell() return nprefix = len(prefix) @@ -75,7 +81,7 @@ pointer = pointer + 1 if pointer < 0 or pointer >= nhist: self.text.bell() - if not self.cyclic and pointer < 0: + if not self.cyclic and pointer < 0: # abort history_prev return else: if self._get_source("iomark", "end-1c") != prefix: @@ -106,3 +112,9 @@ self.history.append(source) self.pointer = None self.prefix = None + +if __name__ == "__main__": + from test import support + support.use_resources = ['gui'] + from unittest import main + main('idlelib.idle_test.test_idlehistory', verbosity=2, exit=False) diff --git a/Lib/idlelib/idle_test/test_idlehistory.py b/Lib/idlelib/idle_test/test_idlehistory.py new file mode 100644 --- /dev/null +++ b/Lib/idlelib/idle_test/test_idlehistory.py @@ -0,0 +1,164 @@ +import unittest +from test.support import requires + +import tkinter as tk +from tkinter import Text as tkText +from idlelib.idle_test.mock_tk import Text as mkText +from idlelib.IdleHistory import History +from idlelib.configHandler import idleConf + +line1 = 'a = 7' +line2 = 'b = a' + +class StoreTest(unittest.TestCase): + '''Tests History.__init__ and History.store with mock Text''' + + @classmethod + def setUpClass(cls): + cls.text = mkText() + cls.history = History(cls.text) + + def tearDown(self): + self.text.delete('1.0', 'end') + self.history.history = [] + + def test_init(self): + self.assertIs(self.history.text, self.text) + self.assertEqual(self.history.history, []) + self.assertIsNone(self.history.prefix) + self.assertIsNone(self.history.pointer) + self.assertEqual(self.history.cyclic, + idleConf.GetOption("main", "History", "cyclic", 1, "bool")) + + def test_store_short(self): + self.history.store('a') + self.assertEqual(self.history.history, []) + self.history.store(' a ') + self.assertEqual(self.history.history, []) + + def test_store_dup(self): + self.history.store(line1) + self.assertEqual(self.history.history, [line1]) + self.history.store(line2) + self.assertEqual(self.history.history, [line1, line2]) + self.history.store(line1) + self.assertEqual(self.history.history, [line2, line1]) + + def test_store_reset(self): + self.history.prefix = line1 + self.history.pointer = 0 + self.history.store(line2) + self.assertIsNone(self.history.prefix) + self.assertIsNone(self.history.pointer) + + +class TextWrapper: + def __init__(self, master): + self.text = tkText(master=master) + self._bell = False + def __getattr__(self, name): + return getattr(self.text, name) + def bell(self): + self._bell = True + +class FetchTest(unittest.TestCase): + '''Test History.fetch with wrapped tk.Text. + ''' + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = tk.Tk() + + def setUp(self): + self.text = text = TextWrapper(self.root) + text.insert('1.0', ">>> ") + text.mark_set('iomark', '1.4') + text.mark_gravity('iomark', 'left') + self.history = History(text) + self.history.history = [line1, line2] + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + + def fetch_test(self, reverse, line, prefix, index, *, bell=False): + # Perform one fetch as invoked by Alt-N or Alt-P + # Test the result. 
The line test is the most important. + # The last two are diagnostic of fetch internals. + History = self.history + History.fetch(reverse) + Equal = self.assertEqual + Equal(self.text.get('iomark', 'end-1c'), line) + Equal(self.text._bell, bell) + if bell: + self.text._bell = False + Equal(History.prefix, prefix) + Equal(History.pointer, index) + + def test_fetch_prev_cyclic(self): + prefix = '' + test = self.fetch_test + test(True, line2, prefix, 1) + test(True, line1, prefix, 0) + test(True, prefix, None, None, bell=True) + + def test_fetch_next_cyclic(self): + prefix = '' + test = self.fetch_test + test(False, line1, prefix, 0) + test(False, line2, prefix, 1) + test(False, prefix, None, None, bell=True) + + # Prefix 'a' tests skip line2, which starts with 'b' + def test_fetch_prev_prefix(self): + prefix = 'a' + self.text.insert('iomark', prefix) + self.fetch_test(True, line1, prefix, 0) + self.fetch_test(True, prefix, None, None, bell=True) + + def test_fetch_next_prefix(self): + prefix = 'a' + self.text.insert('iomark', prefix) + self.fetch_test(False, line1, prefix, 0) + self.fetch_test(False, prefix, None, None, bell=True) + + def test_fetch_prev_noncyclic(self): + prefix = '' + self.history.cyclic = False + test = self.fetch_test + test(True, line2, prefix, 1) + test(True, line1, prefix, 0) + test(True, line1, prefix, 0, bell=True) + + def test_fetch_next_noncyclic(self): + prefix = '' + self.history.cyclic = False + test = self.fetch_test + test(False, prefix, None, None, bell=True) + test(True, line2, prefix, 1) + test(False, prefix, None, None, bell=True) + test(False, prefix, None, None, bell=True) + + def test_fetch_cursor_move(self): + # Move cursor after fetch + self.history.fetch(reverse=True) # initialization + self.text.mark_set('insert', 'iomark') + self.fetch_test(True, line2, None, None, bell=True) + + def test_fetch_edit(self): + # Edit after fetch + self.history.fetch(reverse=True) # initialization + self.text.delete('iomark', 'insert', ) + self.text.insert('iomark', 'a =') + self.fetch_test(True, line1, 'a =', 0) # prefix is reset + + def test_history_prev_next(self): + # Minimally test functions bound to events + self.history.history_prev('dummy event') + self.assertEqual(self.history.pointer, 1) + self.history.history_next('dummy event') + self.assertEqual(self.history.pointer, None) + + +if __name__ == '__main__': + unittest.main(verbosity=2, exit=2) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 20:33:12 2013 From: python-checkins at python.org (terry.reedy) Date: Thu, 15 Aug 2013 20:33:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3cGGWD5Y6Dz7Lkv@mail.python.org> http://hg.python.org/cpython/rev/22b35ddac2e9 changeset: 85183:22b35ddac2e9 parent: 85180:351657165a05 parent: 85182:c4cac5d73e9d user: Terry Jan Reedy date: Thu Aug 15 14:32:46 2013 -0400 summary: Merge with 3.3 files: Lib/idlelib/IdleHistory.py | 18 +- Lib/idlelib/idle_test/test_idlehistory.py | 164 ++++++++++ 2 files changed, 179 insertions(+), 3 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -49,7 +49,13 @@ self.text.insert(where, output) def fetch(self, reverse): - "Fetch statememt and enter into text at cursor." + '''Fetch statememt and replace current line in text widget. + + Set prefix and pointer as needed for successive fetches. 
+ Reset them to None, None when returning to the start line. + Sound bell when return to start line or cannot leave a line + because cyclic is False. + ''' nhist = len(self.history) pointer = self.pointer prefix = self.prefix @@ -64,7 +70,7 @@ else: if self.cyclic: pointer = -1 # will be incremented - else: + else: # abort history_next self.text.bell() return nprefix = len(prefix) @@ -75,7 +81,7 @@ pointer = pointer + 1 if pointer < 0 or pointer >= nhist: self.text.bell() - if not self.cyclic and pointer < 0: + if not self.cyclic and pointer < 0: # abort history_prev return else: if self._get_source("iomark", "end-1c") != prefix: @@ -106,3 +112,9 @@ self.history.append(source) self.pointer = None self.prefix = None + +if __name__ == "__main__": + from test import support + support.use_resources = ['gui'] + from unittest import main + main('idlelib.idle_test.test_idlehistory', verbosity=2, exit=False) diff --git a/Lib/idlelib/idle_test/test_idlehistory.py b/Lib/idlelib/idle_test/test_idlehistory.py new file mode 100644 --- /dev/null +++ b/Lib/idlelib/idle_test/test_idlehistory.py @@ -0,0 +1,164 @@ +import unittest +from test.support import requires + +import tkinter as tk +from tkinter import Text as tkText +from idlelib.idle_test.mock_tk import Text as mkText +from idlelib.IdleHistory import History +from idlelib.configHandler import idleConf + +line1 = 'a = 7' +line2 = 'b = a' + +class StoreTest(unittest.TestCase): + '''Tests History.__init__ and History.store with mock Text''' + + @classmethod + def setUpClass(cls): + cls.text = mkText() + cls.history = History(cls.text) + + def tearDown(self): + self.text.delete('1.0', 'end') + self.history.history = [] + + def test_init(self): + self.assertIs(self.history.text, self.text) + self.assertEqual(self.history.history, []) + self.assertIsNone(self.history.prefix) + self.assertIsNone(self.history.pointer) + self.assertEqual(self.history.cyclic, + idleConf.GetOption("main", "History", "cyclic", 1, "bool")) + + def test_store_short(self): + self.history.store('a') + self.assertEqual(self.history.history, []) + self.history.store(' a ') + self.assertEqual(self.history.history, []) + + def test_store_dup(self): + self.history.store(line1) + self.assertEqual(self.history.history, [line1]) + self.history.store(line2) + self.assertEqual(self.history.history, [line1, line2]) + self.history.store(line1) + self.assertEqual(self.history.history, [line2, line1]) + + def test_store_reset(self): + self.history.prefix = line1 + self.history.pointer = 0 + self.history.store(line2) + self.assertIsNone(self.history.prefix) + self.assertIsNone(self.history.pointer) + + +class TextWrapper: + def __init__(self, master): + self.text = tkText(master=master) + self._bell = False + def __getattr__(self, name): + return getattr(self.text, name) + def bell(self): + self._bell = True + +class FetchTest(unittest.TestCase): + '''Test History.fetch with wrapped tk.Text. + ''' + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = tk.Tk() + + def setUp(self): + self.text = text = TextWrapper(self.root) + text.insert('1.0', ">>> ") + text.mark_set('iomark', '1.4') + text.mark_gravity('iomark', 'left') + self.history = History(text) + self.history.history = [line1, line2] + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + + def fetch_test(self, reverse, line, prefix, index, *, bell=False): + # Perform one fetch as invoked by Alt-N or Alt-P + # Test the result. The line test is the most important. + # The last two are diagnostic of fetch internals. 
+ History = self.history + History.fetch(reverse) + Equal = self.assertEqual + Equal(self.text.get('iomark', 'end-1c'), line) + Equal(self.text._bell, bell) + if bell: + self.text._bell = False + Equal(History.prefix, prefix) + Equal(History.pointer, index) + + def test_fetch_prev_cyclic(self): + prefix = '' + test = self.fetch_test + test(True, line2, prefix, 1) + test(True, line1, prefix, 0) + test(True, prefix, None, None, bell=True) + + def test_fetch_next_cyclic(self): + prefix = '' + test = self.fetch_test + test(False, line1, prefix, 0) + test(False, line2, prefix, 1) + test(False, prefix, None, None, bell=True) + + # Prefix 'a' tests skip line2, which starts with 'b' + def test_fetch_prev_prefix(self): + prefix = 'a' + self.text.insert('iomark', prefix) + self.fetch_test(True, line1, prefix, 0) + self.fetch_test(True, prefix, None, None, bell=True) + + def test_fetch_next_prefix(self): + prefix = 'a' + self.text.insert('iomark', prefix) + self.fetch_test(False, line1, prefix, 0) + self.fetch_test(False, prefix, None, None, bell=True) + + def test_fetch_prev_noncyclic(self): + prefix = '' + self.history.cyclic = False + test = self.fetch_test + test(True, line2, prefix, 1) + test(True, line1, prefix, 0) + test(True, line1, prefix, 0, bell=True) + + def test_fetch_next_noncyclic(self): + prefix = '' + self.history.cyclic = False + test = self.fetch_test + test(False, prefix, None, None, bell=True) + test(True, line2, prefix, 1) + test(False, prefix, None, None, bell=True) + test(False, prefix, None, None, bell=True) + + def test_fetch_cursor_move(self): + # Move cursor after fetch + self.history.fetch(reverse=True) # initialization + self.text.mark_set('insert', 'iomark') + self.fetch_test(True, line2, None, None, bell=True) + + def test_fetch_edit(self): + # Edit after fetch + self.history.fetch(reverse=True) # initialization + self.text.delete('iomark', 'insert', ) + self.text.insert('iomark', 'a =') + self.fetch_test(True, line1, 'a =', 0) # prefix is reset + + def test_history_prev_next(self): + # Minimally test functions bound to events + self.history.history_prev('dummy event') + self.assertEqual(self.history.pointer, 1) + self.history.history_next('dummy event') + self.assertEqual(self.history.pointer, None) + + +if __name__ == '__main__': + unittest.main(verbosity=2, exit=2) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 21:08:18 2013 From: python-checkins at python.org (terry.reedy) Date: Thu, 15 Aug 2013 21:08:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4MjI2?= =?utf-8?q?=3A_Fix_ImportError_and_subsequent_TypeError_in_2=2E7_backport?= =?utf-8?q?=2E?= Message-ID: <3cGHHk3wyFzRhQ@mail.python.org> http://hg.python.org/cpython/rev/47307e7c80e1 changeset: 85184:47307e7c80e1 branch: 2.7 parent: 85181:0e9d41edb2e4 user: Terry Jan Reedy date: Thu Aug 15 15:07:58 2013 -0400 summary: Issue #18226: Fix ImportError and subsequent TypeError in 2.7 backport. Running py27\PCbuild> python_d -m test.regrtest -ugui test_idle ignores the exceptions and gives no indication of a problem (fixed in 3.3). 
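[Editorial aside: the TypeError part of this fix comes from Python 2's unbound-method wrappers. Borrowing a method from another class without ".im_func" stores an unbound method that refuses instances of the borrowing class, which is exactly what the one-line change to get_selection_indices works around. A minimal Python 2 sketch of the failure mode -- the class names below are illustrative, not taken from the patch:

    # Python 2 only: why the backport needs "get_selection_indices.im_func".
    class Source(object):
        def describe(self):
            return 'name is %s' % self.name

    class Borrower(object):
        name = 'borrower'
        broken = Source.describe            # unbound method, still tied to Source
        working = Source.describe.im_func   # underlying function, reusable here

    b = Borrower()
    print(b.working())                      # 'name is borrower'
    try:
        b.broken()                          # refuses a Borrower instance
    except TypeError as exc:
        print('TypeError: %s' % exc)

In Python 3 methods are plain functions again, so the main branch needs no such workaround.]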
files: Lib/idlelib/idle_test/test_formatparagraph.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/idlelib/idle_test/test_formatparagraph.py b/Lib/idlelib/idle_test/test_formatparagraph.py --- a/Lib/idlelib/idle_test/test_formatparagraph.py +++ b/Lib/idlelib/idle_test/test_formatparagraph.py @@ -2,8 +2,8 @@ import unittest from idlelib import FormatParagraph as fp from idlelib.EditorWindow import EditorWindow -from tkinter import Tk, Text, TclError -from test.support import requires +from Tkinter import Tk, Text, TclError +from test.test_support import requires class Is_Get_Test(unittest.TestCase): @@ -238,7 +238,7 @@ class Editor: def __init__(self, root): self.text = TextWrapper(root) - get_selection_indices = EditorWindow. get_selection_indices + get_selection_indices = EditorWindow. get_selection_indices.im_func class FormatEventTest(unittest.TestCase): """Test the formatting of text inside a Text widget. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 22:19:55 2013 From: python-checkins at python.org (terry.reedy) Date: Thu, 15 Aug 2013 22:19:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzMy?= =?utf-8?q?=3A_Remove_unused*_parameter_output=5Fsep_from_IdleHistory=2EHi?= =?utf-8?q?story?= Message-ID: <3cGJtM6snpz7Ljf@mail.python.org> http://hg.python.org/cpython/rev/7339dcff171f changeset: 85185:7339dcff171f branch: 2.7 user: Terry Jan Reedy date: Thu Aug 15 16:19:07 2013 -0400 summary: Issue #18732: Remove unused* parameter output_sep from IdleHistory.History and paired splits and joins that do nothing when output_sep is its default \n. *It in unused in that the class in only instantiated once, with the default. Make a few other changes in .fetch and its test. files: Lib/idlelib/IdleHistory.py | 32 +++------- Lib/idlelib/idle_test/test_idlehistory.py | 2 + 2 files changed, 11 insertions(+), 23 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -10,7 +10,7 @@ history_next - Bound to <> event (default Alt-N). history_prev - Bound to <> event (default Alt-P). ''' - def __init__(self, text, output_sep = "\n"): + def __init__(self, text): '''Initialize data attributes and bind event methods. .text - Idle wrapper of tk Text widget, with .bell(). @@ -23,7 +23,6 @@ self.history = [] self.prefix = None self.pointer = None - self.output_sep = output_sep self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool") text.bind("<>", self.history_prev) text.bind("<>", self.history_next) @@ -38,16 +37,6 @@ self.fetch(reverse=True) return "break" - def _get_source(self, start, end): - # Get source code from start index to end index. Lines in the - # text control may be separated by sys.ps2 . - lines = self.text.get(start, end).split(self.output_sep) - return "\n".join(lines) - - def _put_source(self, where, source): - output = self.output_sep.join(source.split("\n")) - self.text.insert(where, output) - def fetch(self, reverse): '''Fetch statememt and replace current line in text widget. 
@@ -61,10 +50,11 @@ prefix = self.prefix if pointer is not None and prefix is not None: if self.text.compare("insert", "!=", "end-1c") or \ - self._get_source("iomark", "end-1c") != self.history[pointer]: + self.text.get("iomark", "end-1c") != self.history[pointer]: pointer = prefix = None + self.text.mark_set("insert", "end-1c") # != after cursor move if pointer is None or prefix is None: - prefix = self._get_source("iomark", "end-1c") + prefix = self.text.get("iomark", "end-1c") if reverse: pointer = nhist # will be decremented else: @@ -75,26 +65,22 @@ return nprefix = len(prefix) while 1: - if reverse: - pointer = pointer - 1 - else: - pointer = pointer + 1 + pointer += -1 if reverse else 1 if pointer < 0 or pointer >= nhist: self.text.bell() if not self.cyclic and pointer < 0: # abort history_prev return else: - if self._get_source("iomark", "end-1c") != prefix: + if self.text.get("iomark", "end-1c") != prefix: self.text.delete("iomark", "end-1c") - self._put_source("iomark", prefix) + self.text.insert("iomark", prefix) pointer = prefix = None break item = self.history[pointer] if item[:nprefix] == prefix and len(item) > nprefix: self.text.delete("iomark", "end-1c") - self._put_source("iomark", item) + self.text.insert("iomark", item) break - self.text.mark_set("insert", "end-1c") self.text.see("insert") self.text.tag_remove("sel", "1.0", "end") self.pointer = pointer @@ -114,7 +100,7 @@ self.prefix = None if __name__ == "__main__": - from test import support + from test import test_support as support support.use_resources = ['gui'] from unittest import main main('idlelib.idle_test.test_idlehistory', verbosity=2, exit=False) diff --git a/Lib/idlelib/idle_test/test_idlehistory.py b/Lib/idlelib/idle_test/test_idlehistory.py --- a/Lib/idlelib/idle_test/test_idlehistory.py +++ b/Lib/idlelib/idle_test/test_idlehistory.py @@ -87,6 +87,7 @@ # The last two are diagnostic of fetch internals. History = self.history History.fetch(reverse) + Equal = self.assertEqual Equal(self.text.get('iomark', 'end-1c'), line) Equal(self.text._bell, bell) @@ -94,6 +95,7 @@ self.text._bell = False Equal(History.prefix, prefix) Equal(History.pointer, index) + Equal(self.text.compare("insert", '==', "end-1c"), 1) def test_fetch_prev_cyclic(self): prefix = '' -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 22:19:57 2013 From: python-checkins at python.org (terry.reedy) Date: Thu, 15 Aug 2013 22:19:57 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzMy?= =?utf-8?q?=3A_Remove_unused*_parameter_output=5Fsep_from_IdleHistory=2EHi?= =?utf-8?q?story?= Message-ID: <3cGJtP2gnPz7Ljy@mail.python.org> http://hg.python.org/cpython/rev/3105b78d3434 changeset: 85186:3105b78d3434 branch: 3.3 parent: 85182:c4cac5d73e9d user: Terry Jan Reedy date: Thu Aug 15 16:19:13 2013 -0400 summary: Issue #18732: Remove unused* parameter output_sep from IdleHistory.History and paired splits and joins that do nothing when output_sep is its default \n. *It in unused in that the class in only instantiated once, with the default. Make a few other changes in .fetch and its test. files: Lib/idlelib/IdleHistory.py | 30 ++-------- Lib/idlelib/idle_test/test_idlehistory.py | 2 + 2 files changed, 10 insertions(+), 22 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -10,7 +10,7 @@ history_next - Bound to <> event (default Alt-N). 
history_prev - Bound to <> event (default Alt-P). ''' - def __init__(self, text, output_sep = "\n"): + def __init__(self, text): '''Initialize data attributes and bind event methods. .text - Idle wrapper of tk Text widget, with .bell(). @@ -23,7 +23,6 @@ self.history = [] self.prefix = None self.pointer = None - self.output_sep = output_sep self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool") text.bind("<>", self.history_prev) text.bind("<>", self.history_next) @@ -38,16 +37,6 @@ self.fetch(reverse=True) return "break" - def _get_source(self, start, end): - # Get source code from start index to end index. Lines in the - # text control may be separated by sys.ps2 . - lines = self.text.get(start, end).split(self.output_sep) - return "\n".join(lines) - - def _put_source(self, where, source): - output = self.output_sep.join(source.split("\n")) - self.text.insert(where, output) - def fetch(self, reverse): '''Fetch statememt and replace current line in text widget. @@ -61,10 +50,11 @@ prefix = self.prefix if pointer is not None and prefix is not None: if self.text.compare("insert", "!=", "end-1c") or \ - self._get_source("iomark", "end-1c") != self.history[pointer]: + self.text.get("iomark", "end-1c") != self.history[pointer]: pointer = prefix = None + self.text.mark_set("insert", "end-1c") # != after cursor move if pointer is None or prefix is None: - prefix = self._get_source("iomark", "end-1c") + prefix = self.text.get("iomark", "end-1c") if reverse: pointer = nhist # will be decremented else: @@ -75,26 +65,22 @@ return nprefix = len(prefix) while 1: - if reverse: - pointer = pointer - 1 - else: - pointer = pointer + 1 + pointer += -1 if reverse else 1 if pointer < 0 or pointer >= nhist: self.text.bell() if not self.cyclic and pointer < 0: # abort history_prev return else: - if self._get_source("iomark", "end-1c") != prefix: + if self.text.get("iomark", "end-1c") != prefix: self.text.delete("iomark", "end-1c") - self._put_source("iomark", prefix) + self.text.insert("iomark", prefix) pointer = prefix = None break item = self.history[pointer] if item[:nprefix] == prefix and len(item) > nprefix: self.text.delete("iomark", "end-1c") - self._put_source("iomark", item) + self.text.insert("iomark", item) break - self.text.mark_set("insert", "end-1c") self.text.see("insert") self.text.tag_remove("sel", "1.0", "end") self.pointer = pointer diff --git a/Lib/idlelib/idle_test/test_idlehistory.py b/Lib/idlelib/idle_test/test_idlehistory.py --- a/Lib/idlelib/idle_test/test_idlehistory.py +++ b/Lib/idlelib/idle_test/test_idlehistory.py @@ -87,6 +87,7 @@ # The last two are diagnostic of fetch internals. 
History = self.history History.fetch(reverse) + Equal = self.assertEqual Equal(self.text.get('iomark', 'end-1c'), line) Equal(self.text._bell, bell) @@ -94,6 +95,7 @@ self.text._bell = False Equal(History.prefix, prefix) Equal(History.pointer, index) + Equal(self.text.compare("insert", '==', "end-1c"), 1) def test_fetch_prev_cyclic(self): prefix = '' -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 15 22:19:58 2013 From: python-checkins at python.org (terry.reedy) Date: Thu, 15 Aug 2013 22:19:58 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3cGJtQ5Ybyz7LkM@mail.python.org> http://hg.python.org/cpython/rev/e0f36a9420e4 changeset: 85187:e0f36a9420e4 parent: 85183:22b35ddac2e9 parent: 85186:3105b78d3434 user: Terry Jan Reedy date: Thu Aug 15 16:19:33 2013 -0400 summary: Merge with 3.3 files: Lib/idlelib/IdleHistory.py | 30 ++-------- Lib/idlelib/idle_test/test_idlehistory.py | 2 + 2 files changed, 10 insertions(+), 22 deletions(-) diff --git a/Lib/idlelib/IdleHistory.py b/Lib/idlelib/IdleHistory.py --- a/Lib/idlelib/IdleHistory.py +++ b/Lib/idlelib/IdleHistory.py @@ -10,7 +10,7 @@ history_next - Bound to <> event (default Alt-N). history_prev - Bound to <> event (default Alt-P). ''' - def __init__(self, text, output_sep = "\n"): + def __init__(self, text): '''Initialize data attributes and bind event methods. .text - Idle wrapper of tk Text widget, with .bell(). @@ -23,7 +23,6 @@ self.history = [] self.prefix = None self.pointer = None - self.output_sep = output_sep self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool") text.bind("<>", self.history_prev) text.bind("<>", self.history_next) @@ -38,16 +37,6 @@ self.fetch(reverse=True) return "break" - def _get_source(self, start, end): - # Get source code from start index to end index. Lines in the - # text control may be separated by sys.ps2 . - lines = self.text.get(start, end).split(self.output_sep) - return "\n".join(lines) - - def _put_source(self, where, source): - output = self.output_sep.join(source.split("\n")) - self.text.insert(where, output) - def fetch(self, reverse): '''Fetch statememt and replace current line in text widget. 
@@ -61,10 +50,11 @@ prefix = self.prefix if pointer is not None and prefix is not None: if self.text.compare("insert", "!=", "end-1c") or \ - self._get_source("iomark", "end-1c") != self.history[pointer]: + self.text.get("iomark", "end-1c") != self.history[pointer]: pointer = prefix = None + self.text.mark_set("insert", "end-1c") # != after cursor move if pointer is None or prefix is None: - prefix = self._get_source("iomark", "end-1c") + prefix = self.text.get("iomark", "end-1c") if reverse: pointer = nhist # will be decremented else: @@ -75,26 +65,22 @@ return nprefix = len(prefix) while 1: - if reverse: - pointer = pointer - 1 - else: - pointer = pointer + 1 + pointer += -1 if reverse else 1 if pointer < 0 or pointer >= nhist: self.text.bell() if not self.cyclic and pointer < 0: # abort history_prev return else: - if self._get_source("iomark", "end-1c") != prefix: + if self.text.get("iomark", "end-1c") != prefix: self.text.delete("iomark", "end-1c") - self._put_source("iomark", prefix) + self.text.insert("iomark", prefix) pointer = prefix = None break item = self.history[pointer] if item[:nprefix] == prefix and len(item) > nprefix: self.text.delete("iomark", "end-1c") - self._put_source("iomark", item) + self.text.insert("iomark", item) break - self.text.mark_set("insert", "end-1c") self.text.see("insert") self.text.tag_remove("sel", "1.0", "end") self.pointer = pointer diff --git a/Lib/idlelib/idle_test/test_idlehistory.py b/Lib/idlelib/idle_test/test_idlehistory.py --- a/Lib/idlelib/idle_test/test_idlehistory.py +++ b/Lib/idlelib/idle_test/test_idlehistory.py @@ -87,6 +87,7 @@ # The last two are diagnostic of fetch internals. History = self.history History.fetch(reverse) + Equal = self.assertEqual Equal(self.text.get('iomark', 'end-1c'), line) Equal(self.text._bell, bell) @@ -94,6 +95,7 @@ self.text._bell = False Equal(History.prefix, prefix) Equal(History.pointer, index) + Equal(self.text.compare("insert", '==', "end-1c"), 1) def test_fetch_prev_cyclic(self): prefix = '' -- Repository URL: http://hg.python.org/cpython From tjreedy at udel.edu Thu Aug 15 21:27:24 2013 From: tjreedy at udel.edu (Terry Reedy) Date: Thu, 15 Aug 2013 15:27:24 -0400 Subject: [Python-checkins] cpython (2.7): Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. In-Reply-To: <3cGGW96mBPzSQt@mail.python.org> References: <3cGGW96mBPzSQt@mail.python.org> Message-ID: <520D2B9C.1080002@udel.edu> On 8/15/2013 2:33 PM, terry.reedy wrote: > http://hg.python.org/cpython/rev/0e9d41edb2e4 > changeset: 85181:0e9d41edb2e4 > branch: 2.7 > parent: 85170:5063dab96843 > user: Terry Jan Reedy > date: Thu Aug 15 14:31:55 2013 -0400 > summary: > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. > > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. > > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. > > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. > Issue #18425: Unittests for idlelib.IdleHistory. First patch by R. Jayakrishnan. Sorry about the repetition. 
The shrunken hgWorkbench commit message window was not showing anything when I inserted with ^V and when it finally did, I did not see the repetition. From solipsis at pitrou.net Fri Aug 16 05:56:36 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 16 Aug 2013 05:56:36 +0200 Subject: [Python-checkins] Daily reference leaks (e0f36a9420e4): sum=0 Message-ID: results for e0f36a9420e4 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogWz1KFI', '-x'] From python-checkins at python.org Fri Aug 16 14:38:01 2013 From: python-checkins at python.org (christian.heimes) Date: Fri, 16 Aug 2013 14:38:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318673=3A_Add_O=5F?= =?utf-8?q?TMPFILE_to_os_module=2E_O=5FTMPFILE_requires_Linux_kernel?= Message-ID: <3cGkZx62P8z7LjN@mail.python.org> http://hg.python.org/cpython/rev/f6034602410c changeset: 85188:f6034602410c user: Christian Heimes date: Fri Aug 16 14:35:09 2013 +0200 summary: Issue #18673: Add O_TMPFILE to os module. O_TMPFILE requires Linux kernel 3.11 or newer. It's only defined on system with 3.11 uapi headers, too. files: Doc/library/os.rst | 1 + Misc/NEWS | 3 +++ Modules/posixmodule.c | 3 +++ 3 files changed, 7 insertions(+), 0 deletions(-) diff --git a/Doc/library/os.rst b/Doc/library/os.rst --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -919,6 +919,7 @@ O_NOFOLLOW O_NOATIME O_PATH + O_TMPFILE These constants are GNU extensions and not present if they are not defined by the C library. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,9 @@ Library ------- +- Issue #18673: Add O_TMPFILE to os module. O_TMPFILE requires Linux kernel + 3.11 or newer. It's only defined on system with 3.11 uapi headers, too. + - Issue #18532: Change the builtin hash algorithms' names to lower case names as promised by hashlib's documentation. diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -11245,6 +11245,9 @@ #ifdef O_TTY_INIT if (PyModule_AddIntMacro(m, O_TTY_INIT)) return -1; #endif +#ifdef O_TMPFILE + if (PyModule_AddIntMacro(m, O_TMPFILE)) return -1; +#endif #ifdef PRIO_PROCESS if (PyModule_AddIntMacro(m, PRIO_PROCESS)) return -1; #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 14:38:03 2013 From: python-checkins at python.org (christian.heimes) Date: Fri, 16 Aug 2013 14:38:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318673=3A_Add_vers?= =?utf-8?q?ionchanged_to_docs?= Message-ID: <3cGkZz0w4wz7LjR@mail.python.org> http://hg.python.org/cpython/rev/815b7bb3b08d changeset: 85189:815b7bb3b08d user: Christian Heimes date: Fri Aug 16 14:37:50 2013 +0200 summary: Issue #18673: Add versionchanged to docs files: Doc/library/os.rst | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Doc/library/os.rst b/Doc/library/os.rst --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -924,6 +924,10 @@ These constants are GNU extensions and not present if they are not defined by the C library. + .. versionchanged:: 3.4 + Add :data:`O_TMPFILE` constant. It's only available on Linux Kernel 3.11 + or newer. + .. 
function:: openpty() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 19:21:35 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 19:21:35 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE2MTkw?= =?utf-8?q?=3A_fix_random_module_recommendation_to_use_ssl=2ERAND=5Fbytes?= =?utf-8?b?KCku?= Message-ID: <3cGrt76wQYz7LjN@mail.python.org> http://hg.python.org/cpython/rev/9df0501fab35 changeset: 85190:9df0501fab35 branch: 3.3 parent: 85186:3105b78d3434 user: Antoine Pitrou date: Fri Aug 16 19:19:40 2013 +0200 summary: Issue #16190: fix random module recommendation to use ssl.RAND_bytes(). files: Doc/library/random.rst | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Doc/library/random.rst b/Doc/library/random.rst --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -45,9 +45,9 @@ .. warning:: - The generators of the :mod:`random` module should not be used for security - purposes. Use :func:`ssl.RAND_bytes` if you require a cryptographically - secure pseudorandom number generator. + The pseudo-random generators of this module should not be used for + security purposes. Use :func:`os.urandom` or :class:`SystemRandom` if + you require a cryptographically secure pseudo-random number generator. Bookkeeping functions: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 19:21:37 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 19:21:37 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2316190=3A_fix_random_module_recommendation_to_us?= =?utf-8?b?ZSBzc2wuUkFORF9ieXRlcygpLg==?= Message-ID: <3cGrt91dJtz7Lk3@mail.python.org> http://hg.python.org/cpython/rev/04b50a1eb013 changeset: 85191:04b50a1eb013 parent: 85189:815b7bb3b08d parent: 85190:9df0501fab35 user: Antoine Pitrou date: Fri Aug 16 19:20:04 2013 +0200 summary: Issue #16190: fix random module recommendation to use ssl.RAND_bytes(). files: Doc/library/random.rst | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Doc/library/random.rst b/Doc/library/random.rst --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -45,9 +45,9 @@ .. warning:: - The generators of the :mod:`random` module should not be used for security - purposes. Use :func:`ssl.RAND_bytes` if you require a cryptographically - secure pseudorandom number generator. + The pseudo-random generators of this module should not be used for + security purposes. Use :func:`os.urandom` or :class:`SystemRandom` if + you require a cryptographically secure pseudo-random number generator. Bookkeeping functions: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 19:21:38 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 19:21:38 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE2MTkw?= =?utf-8?q?=3A_fix_random_module_recommendation_to_use_ssl=2ERAND=5Fbytes?= =?utf-8?b?KCku?= Message-ID: <3cGrtB3TG0z7Lk4@mail.python.org> http://hg.python.org/cpython/rev/48b618a9ad10 changeset: 85192:48b618a9ad10 branch: 2.7 parent: 85185:7339dcff171f user: Antoine Pitrou date: Fri Aug 16 19:19:40 2013 +0200 summary: Issue #16190: fix random module recommendation to use ssl.RAND_bytes(). 
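[Editorial aside: the reworded warning above points security-sensitive code at os.urandom() or random.SystemRandom instead of the default Mersenne Twister generator. A small sketch of the difference in practice; the 128-bit key size and 16-byte token length are arbitrary choices for illustration:

    import os
    import random

    # The module-level functions use a deterministic, seedable Mersenne
    # Twister: fine for simulations and games, unsuitable for secrets.
    simulation_sample = random.random()

    # SystemRandom and os.urandom() both draw from the operating system's
    # entropy source and cannot be reseeded or reproduced.
    secure = random.SystemRandom()
    session_key = secure.getrandbits(128)    # unpredictable 128-bit integer
    raw_token = os.urandom(16)               # 16 unpredictable bytes
    assert session_key >= 0 and len(raw_token) == 16
]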
files: Doc/library/random.rst | 7 +++++++ 1 files changed, 7 insertions(+), 0 deletions(-) diff --git a/Doc/library/random.rst b/Doc/library/random.rst --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -60,6 +60,13 @@ uses the system function :func:`os.urandom` to generate random numbers from sources provided by the operating system. +.. warning:: + + The pseudo-random generators of this module should not be used for + security purposes. Use :func:`os.urandom` or :class:`SystemRandom` if + you require a cryptographically secure pseudo-random number generator. + + Bookkeeping functions: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 19:36:30 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 19:36:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE2NjYz?= =?utf-8?q?18=3A_Add_a_test_that_shutil=2Ecopytree=28=29_retains_directory?= =?utf-8?q?_permissions=2E?= Message-ID: <3cGsCL5wc0z7LjM@mail.python.org> http://hg.python.org/cpython/rev/c388e93879c4 changeset: 85193:c388e93879c4 branch: 3.3 parent: 85190:9df0501fab35 user: Antoine Pitrou date: Fri Aug 16 19:35:02 2013 +0200 summary: Issue #1666318: Add a test that shutil.copytree() retains directory permissions. Patch by Catherine Devlin. files: Lib/test/test_shutil.py | 26 ++++++++++++++++++++++++++ Misc/NEWS | 3 +++ 2 files changed, 29 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -726,6 +726,32 @@ shutil.rmtree(src_dir) shutil.rmtree(os.path.dirname(dst_dir)) + def test_copytree_retains_permissions(self): + tmp_dir = tempfile.mkdtemp() + src_dir = os.path.join(tmp_dir, 'source') + os.mkdir(src_dir) + dst_dir = os.path.join(tmp_dir, 'destination') + self.addCleanup(shutil.rmtree, tmp_dir) + + os.chmod(src_dir, 0o777) + write_file((src_dir, 'permissive.txt'), '123') + os.chmod(os.path.join(src_dir, 'permissive.txt'), 0o777) + write_file((src_dir, 'restrictive.txt'), '456') + os.chmod(os.path.join(src_dir, 'restrictive.txt'), 0o600) + restrictive_subdir = tempfile.mkdtemp(dir=src_dir) + os.chmod(restrictive_subdir, 0o600) + + shutil.copytree(src_dir, dst_dir) + self.assertEquals(os.stat(src_dir).st_mode, os.stat(dst_dir).st_mode) + self.assertEquals(os.stat(os.path.join(src_dir, 'permissive.txt')).st_mode, + os.stat(os.path.join(dst_dir, 'permissive.txt')).st_mode) + self.assertEquals(os.stat(os.path.join(src_dir, 'restrictive.txt')).st_mode, + os.stat(os.path.join(dst_dir, 'restrictive.txt')).st_mode) + restrictive_subdir_dst = os.path.join(dst_dir, + os.path.split(restrictive_subdir)[1]) + self.assertEquals(os.stat(restrictive_subdir).st_mode, + os.stat(restrictive_subdir_dst).st_mode) + @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') def test_dont_copy_file_onto_link_to_itself(self): # Temporarily disable test on Windows. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -258,6 +258,9 @@ Tests ----- +- Issue #1666318: Add a test that shutil.copytree() retains directory + permissions. Patch by Catherine Devlin. + - Issue #18357: add tests for dictview set difference. Patch by Fraser Tweedale. 
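[Editorial aside: the new test above asserts that shutil.copytree() reproduces the source tree's mode bits. The same property can be checked outside the test suite in a few lines; the file name and the 0o600 mode are arbitrary, and POSIX-style permissions are assumed:

    import os
    import shutil
    import stat
    import tempfile

    # Build a tiny source tree with a non-default mode.
    src = tempfile.mkdtemp()
    with open(os.path.join(src, 'data.txt'), 'w') as f:
        f.write('123')
    os.chmod(os.path.join(src, 'data.txt'), 0o600)

    # copytree() requires that the destination directory not exist yet.
    dst = os.path.join(tempfile.mkdtemp(), 'copy')
    shutil.copytree(src, dst)

    # copy2()/copystat() inside copytree() carry the mode bits across.
    src_mode = stat.S_IMODE(os.stat(os.path.join(src, 'data.txt')).st_mode)
    dst_mode = stat.S_IMODE(os.stat(os.path.join(dst, 'data.txt')).st_mode)
    assert src_mode == dst_mode == 0o600
]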
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 19:36:32 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 19:36:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=231666318=3A_Add_a_test_that_shutil=2Ecopytree=28?= =?utf-8?q?=29_retains_directory_permissions=2E?= Message-ID: <3cGsCN0sJtz7Ljh@mail.python.org> http://hg.python.org/cpython/rev/8906713d5704 changeset: 85194:8906713d5704 parent: 85191:04b50a1eb013 parent: 85193:c388e93879c4 user: Antoine Pitrou date: Fri Aug 16 19:36:18 2013 +0200 summary: Issue #1666318: Add a test that shutil.copytree() retains directory permissions. Patch by Catherine Devlin. files: Lib/test/test_shutil.py | 26 ++++++++++++++++++++++++++ Misc/NEWS | 3 +++ 2 files changed, 29 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -727,6 +727,32 @@ shutil.rmtree(src_dir) shutil.rmtree(os.path.dirname(dst_dir)) + def test_copytree_retains_permissions(self): + tmp_dir = tempfile.mkdtemp() + src_dir = os.path.join(tmp_dir, 'source') + os.mkdir(src_dir) + dst_dir = os.path.join(tmp_dir, 'destination') + self.addCleanup(shutil.rmtree, tmp_dir) + + os.chmod(src_dir, 0o777) + write_file((src_dir, 'permissive.txt'), '123') + os.chmod(os.path.join(src_dir, 'permissive.txt'), 0o777) + write_file((src_dir, 'restrictive.txt'), '456') + os.chmod(os.path.join(src_dir, 'restrictive.txt'), 0o600) + restrictive_subdir = tempfile.mkdtemp(dir=src_dir) + os.chmod(restrictive_subdir, 0o600) + + shutil.copytree(src_dir, dst_dir) + self.assertEquals(os.stat(src_dir).st_mode, os.stat(dst_dir).st_mode) + self.assertEquals(os.stat(os.path.join(src_dir, 'permissive.txt')).st_mode, + os.stat(os.path.join(dst_dir, 'permissive.txt')).st_mode) + self.assertEquals(os.stat(os.path.join(src_dir, 'restrictive.txt')).st_mode, + os.stat(os.path.join(dst_dir, 'restrictive.txt')).st_mode) + restrictive_subdir_dst = os.path.join(dst_dir, + os.path.split(restrictive_subdir)[1]) + self.assertEquals(os.stat(restrictive_subdir).st_mode, + os.stat(restrictive_subdir_dst).st_mode) + @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') def test_dont_copy_file_onto_link_to_itself(self): # Temporarily disable test on Windows. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -682,6 +682,9 @@ Tests ----- +- Issue #1666318: Add a test that shutil.copytree() retains directory + permissions. Patch by Catherine Devlin. + - Issue #18273: move the tests in Lib/test/json_tests to Lib/test/test_json and make them discoverable by unittest. Patch by Zachary Ware. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 20:33:43 2013 From: python-checkins at python.org (ezio.melotti) Date: Fri, 16 Aug 2013 20:33:43 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogIzE4NzA3OiBwb2lu?= =?utf-8?q?t_to_Doc/README=2Etxt_in_the_README_file=2E__Patch_by_Madison_M?= =?utf-8?b?YXku?= Message-ID: <3cGtTM2WGjz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/6bc88d61f302 changeset: 85195:6bc88d61f302 branch: 2.7 parent: 85192:48b618a9ad10 user: Ezio Melotti date: Fri Aug 16 21:32:25 2013 +0300 summary: #18707: point to Doc/README.txt in the README file. Patch by Madison May. 
files: README | 7 +++++++ 1 files changed, 7 insertions(+), 0 deletions(-) diff --git a/README b/README --- a/README +++ b/README @@ -89,6 +89,13 @@ primarily for documentation authors, translators, and people with special formatting requirements. +If you would like to contribute to the development of Python, relevant +documentation is available at: + + http://docs.python.org/devguide/ + +For information about building Python's documentation, refer to Doc/README.txt. + Web sites --------- -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 20:33:44 2013 From: python-checkins at python.org (ezio.melotti) Date: Fri, 16 Aug 2013 20:33:44 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NzA3OiBwb2lu?= =?utf-8?q?t_to_Doc/README=2Etxt_in_the_README_file=2E__Patch_by_Madison_M?= =?utf-8?b?YXku?= Message-ID: <3cGtTN4Lxnz7LjT@mail.python.org> http://hg.python.org/cpython/rev/477a143bfbfd changeset: 85196:477a143bfbfd branch: 3.3 parent: 85193:c388e93879c4 user: Ezio Melotti date: Fri Aug 16 21:32:25 2013 +0300 summary: #18707: point to Doc/README.txt in the README file. Patch by Madison May. files: README | 7 +++++++ 1 files changed, 7 insertions(+), 0 deletions(-) diff --git a/README b/README --- a/README +++ b/README @@ -76,6 +76,13 @@ is primarily for documentation authors, translators, and people with special formatting requirements. +If you would like to contribute to the development of Python, relevant +documentation is available at: + + http://docs.python.org/devguide/ + +For information about building Python's documentation, refer to Doc/README.txt. + Converting From Python 2.x to 3.x --------------------------------- -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 20:33:45 2013 From: python-checkins at python.org (ezio.melotti) Date: Fri, 16 Aug 2013 20:33:45 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NzA3OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cGtTP6MkTz7LjT@mail.python.org> http://hg.python.org/cpython/rev/96c51207e7cd changeset: 85197:96c51207e7cd parent: 85194:8906713d5704 parent: 85196:477a143bfbfd user: Ezio Melotti date: Fri Aug 16 21:33:27 2013 +0300 summary: #18707: merge with 3.3. files: README | 7 +++++++ 1 files changed, 7 insertions(+), 0 deletions(-) diff --git a/README b/README --- a/README +++ b/README @@ -76,6 +76,13 @@ is primarily for documentation authors, translators, and people with special formatting requirements. +If you would like to contribute to the development of Python, relevant +documentation is available at: + + http://docs.python.org/devguide/ + +For information about building Python's documentation, refer to Doc/README.txt. 
+ Converting From Python 2.x to 3.x --------------------------------- -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 20:52:06 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 20:52:06 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzU2?= =?utf-8?q?=3A_Improve_error_reporting_in_os=2Eurandom=28=29_when_the_fail?= =?utf-8?q?ure_is_due?= Message-ID: <3cGttZ0hl3z7Ljh@mail.python.org> http://hg.python.org/cpython/rev/193bcc12575d changeset: 85198:193bcc12575d branch: 3.3 parent: 85196:477a143bfbfd user: Antoine Pitrou date: Fri Aug 16 20:44:38 2013 +0200 summary: Issue #18756: Improve error reporting in os.urandom() when the failure is due to something else than /dev/urandom not existing. files: Lib/test/test_os.py | 20 ++++++++++++++++++++ Misc/NEWS | 4 ++++ Python/random.c | 8 ++++++-- 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -28,6 +28,11 @@ import threading except ImportError: threading = None +try: + import resource +except ImportError: + resource = None + from test.script_helper import assert_python_ok with warnings.catch_warnings(): @@ -997,6 +1002,21 @@ data2 = self.get_urandom_subprocess(16) self.assertNotEqual(data1, data2) + @unittest.skipUnless(resource, "test requires the resource module") + def test_urandom_failure(self): + soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE) + resource.setrlimit(resource.RLIMIT_NOFILE, (1, hard_limit)) + try: + with self.assertRaises(OSError) as cm: + os.urandom(16) + self.assertEqual(cm.exception.errno, errno.EMFILE) + finally: + # We restore the old limit as soon as possible. If doing it + # using addCleanup(), code running in between would fail + # creating any file descriptor. + resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit)) + + @contextlib.contextmanager def _execvpe_mockup(defpath=None): """ diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,10 @@ Library ------- +- Issue #18756: Improve error reporting in os.urandom() when the failure + is due to something else than /dev/urandom not existing (for example, + exhausting the file descriptor limit). + - Issue #18405: Improve the entropy of crypt.mksalt(). 
- Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get diff --git a/Python/random.c b/Python/random.c --- a/Python/random.c +++ b/Python/random.c @@ -165,8 +165,12 @@ Py_END_ALLOW_THREADS if (fd < 0) { - PyErr_SetString(PyExc_NotImplementedError, - "/dev/urandom (or equivalent) not found"); + if (errno == ENOENT || errno == ENXIO || + errno == ENODEV || errno == EACCES) + PyErr_SetString(PyExc_NotImplementedError, + "/dev/urandom (or equivalent) not found"); + else + PyErr_SetFromErrno(PyExc_OSError); return -1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 20:52:07 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 20:52:07 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318756=3A_Improve_error_reporting_in_os=2Eurando?= =?utf-8?q?m=28=29_when_the_failure_is_due?= Message-ID: <3cGttb2mRmz7Lk1@mail.python.org> http://hg.python.org/cpython/rev/fe949918616c changeset: 85199:fe949918616c parent: 85197:96c51207e7cd parent: 85198:193bcc12575d user: Antoine Pitrou date: Fri Aug 16 20:49:32 2013 +0200 summary: Issue #18756: Improve error reporting in os.urandom() when the failure is due to something else than /dev/urandom not existing. files: Lib/test/test_os.py | 20 ++++++++++++++++++++ Misc/NEWS | 4 ++++ Python/random.c | 8 ++++++-- 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -30,6 +30,11 @@ import threading except ImportError: threading = None +try: + import resource +except ImportError: + resource = None + from test.script_helper import assert_python_ok with warnings.catch_warnings(): @@ -1010,6 +1015,21 @@ data2 = self.get_urandom_subprocess(16) self.assertNotEqual(data1, data2) + @unittest.skipUnless(resource, "test requires the resource module") + def test_urandom_failure(self): + soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE) + resource.setrlimit(resource.RLIMIT_NOFILE, (1, hard_limit)) + try: + with self.assertRaises(OSError) as cm: + os.urandom(16) + self.assertEqual(cm.exception.errno, errno.EMFILE) + finally: + # We restore the old limit as soon as possible. If doing it + # using addCleanup(), code running in between would fail + # creating any file descriptor. + resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit)) + + @contextlib.contextmanager def _execvpe_mockup(defpath=None): """ diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,10 @@ Library ------- +- Issue #18756: Improve error reporting in os.urandom() when the failure + is due to something else than /dev/urandom not existing (for example, + exhausting the file descriptor limit). + - Issue #18673: Add O_TMPFILE to os module. O_TMPFILE requires Linux kernel 3.11 or newer. It's only defined on system with 3.11 uapi headers, too. 
diff --git a/Python/random.c b/Python/random.c --- a/Python/random.c +++ b/Python/random.c @@ -138,8 +138,12 @@ Py_END_ALLOW_THREADS if (fd < 0) { - PyErr_SetString(PyExc_NotImplementedError, - "/dev/urandom (or equivalent) not found"); + if (errno == ENOENT || errno == ENXIO || + errno == ENODEV || errno == EACCES) + PyErr_SetString(PyExc_NotImplementedError, + "/dev/urandom (or equivalent) not found"); + else + PyErr_SetFromErrno(PyExc_OSError); return -1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 20:54:19 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 20:54:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzU2?= =?utf-8?q?=3A_Improve_error_reporting_in_os=2Eurandom=28=29_when_the_fail?= =?utf-8?q?ure_is_due?= Message-ID: <3cGtx75f59z7LjM@mail.python.org> http://hg.python.org/cpython/rev/ec296a36156b changeset: 85200:ec296a36156b branch: 2.7 parent: 85195:6bc88d61f302 user: Antoine Pitrou date: Fri Aug 16 20:44:38 2013 +0200 summary: Issue #18756: Improve error reporting in os.urandom() when the failure is due to something else than /dev/urandom not existing. files: Lib/test/test_os.py | 18 ++++++++++++++++++ Misc/NEWS | 4 ++++ Python/random.c | 8 ++++++-- 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -10,6 +10,10 @@ import signal import subprocess import time +try: + import resource +except ImportError: + resource = None from test import test_support import mmap @@ -563,6 +567,20 @@ data2 = self.get_urandom_subprocess(16) self.assertNotEqual(data1, data2) + @unittest.skipUnless(resource, "test requires the resource module") + def test_urandom_failure(self): + soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE) + resource.setrlimit(resource.RLIMIT_NOFILE, (1, hard_limit)) + try: + with self.assertRaises(OSError) as cm: + os.urandom(16) + self.assertEqual(cm.exception.errno, errno.EMFILE) + finally: + # We restore the old limit as soon as possible. If doing it + # using addCleanup(), code running in between would fail + # creating any file descriptor. + resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit)) + def test_execvpe_with_bad_arglist(self): self.assertRaises(ValueError, os.execvpe, 'notepad', [], None) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,10 @@ Library ------- +- Issue #18756: Improve error reporting in os.urandom() when the failure + is due to something else than /dev/urandom not existing (for example, + exhausting the file descriptor limit). + - Fix tkinter regression introduced by the security fix in issue #16248. 
- Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get diff --git a/Python/random.c b/Python/random.c --- a/Python/random.c +++ b/Python/random.c @@ -165,8 +165,12 @@ Py_END_ALLOW_THREADS if (fd < 0) { - PyErr_SetString(PyExc_NotImplementedError, - "/dev/urandom (or equivalent) not found"); + if (errno == ENOENT || errno == ENXIO || + errno == ENODEV || errno == EACCES) + PyErr_SetString(PyExc_NotImplementedError, + "/dev/urandom (or equivalent) not found"); + else + PyErr_SetFromErrno(PyExc_OSError); return -1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 20:56:21 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 20:56:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogdGVzdF9leGVjdnBl?= =?utf-8?q?=5Fwith=5Fbad=5Farglist_shouldn=27t_be_in_URandomTests?= Message-ID: <3cGtzT3t3hz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/87fcc13ade91 changeset: 85201:87fcc13ade91 branch: 2.7 user: Antoine Pitrou date: Fri Aug 16 20:56:12 2013 +0200 summary: test_execvpe_with_bad_arglist shouldn't be in URandomTests files: Lib/test/test_os.py | 5 +++++ 1 files changed, 5 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -581,9 +581,13 @@ # creating any file descriptor. resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit)) + +class ExecvpeTests(unittest.TestCase): + def test_execvpe_with_bad_arglist(self): self.assertRaises(ValueError, os.execvpe, 'notepad', [], None) + class Win32ErrorTests(unittest.TestCase): def test_rename(self): self.assertRaises(WindowsError, os.rename, test_support.TESTFN, test_support.TESTFN+".bak") @@ -870,6 +874,7 @@ MakedirTests, DevNullTests, URandomTests, + ExecvpeTests, Win32ErrorTests, TestInvalidFD, PosixUidGidTests, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 21:02:11 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 21:02:11 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Make_test=2Esupport=2Ereap?= =?utf-8?q?=5Fthreads=28=29_faster?= Message-ID: <3cGv6C4sqFzLr7@mail.python.org> http://hg.python.org/cpython/rev/62c6cba69e9b changeset: 85202:62c6cba69e9b parent: 85199:fe949918616c user: Antoine Pitrou date: Fri Aug 16 21:02:02 2013 +0200 summary: Make test.support.reap_threads() faster files: Lib/test/support/__init__.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1750,12 +1750,12 @@ def threading_cleanup(*original_values): if not _thread: return - _MAX_COUNT = 10 + _MAX_COUNT = 100 for count in range(_MAX_COUNT): values = _thread._count(), threading._dangling if values == original_values: break - time.sleep(0.1) + time.sleep(0.01) gc_collect() # XXX print a warning in case of failure? 
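[Editorial aside: the threading_cleanup() change keeps the worst-case wait at roughly one second (100 polls of 0.01s instead of 10 polls of 0.1s) while letting the common case return almost immediately. The polling pattern in isolation looks roughly like the sketch below; wait_for() and the Event-based demo are illustrative, not part of the patch:

    import threading
    import time

    def wait_for(predicate, timeout=1.0, interval=0.01):
        # Poll until predicate() is true or the timeout elapses.  A shorter
        # interval leaves the worst case unchanged but lets the common case
        # return after milliseconds instead of a tenth of a second.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if predicate():
                return True
            time.sleep(interval)
        return predicate()

    ready = threading.Event()
    threading.Thread(target=ready.set).start()
    print(wait_for(ready.is_set))   # True, usually after at most one short sleep
]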
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 22:31:22 2013 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 16 Aug 2013 22:31:22 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fix_the_default_placeholde?= =?utf-8?b?ciBpbiB0ZXh0d3JhcC5zaG9ydGVuKCkgdG8gYmUgIiBbLi4uXSIu?= Message-ID: <3cGx5637tXz7LjN@mail.python.org> http://hg.python.org/cpython/rev/be5481bf4c57 changeset: 85203:be5481bf4c57 user: Antoine Pitrou date: Fri Aug 16 22:31:12 2013 +0200 summary: Fix the default placeholder in textwrap.shorten() to be " [...]". For some reason I forgot to do it before committing the patch in issue #18585. files: Doc/library/textwrap.rst | 6 +++--- Lib/test/test_textwrap.py | 12 ++++++------ Lib/textwrap.py | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Doc/library/textwrap.rst b/Doc/library/textwrap.rst --- a/Doc/library/textwrap.rst +++ b/Doc/library/textwrap.rst @@ -40,7 +40,7 @@ :func:`wrap`. -.. function:: shorten(text, width=70, *, placeholder=" (...)") +.. function:: shorten(text, width=70, *, placeholder=" [...]") Collapse and truncate the given text to fit in the given width. @@ -51,7 +51,7 @@ >>> textwrap.shorten("Hello world!", width=12) 'Hello world!' >>> textwrap.shorten("Hello world!", width=11) - 'Hello (...)' + 'Hello [...]' >>> textwrap.shorten("Hello world", width=10, placeholder="...") 'Hello...' @@ -268,7 +268,7 @@ containing the wrapped paragraph. - .. function:: shorten(text, *, placeholder=" (...)") + .. function:: shorten(text, *, placeholder=" [...]") Collapse and truncate the given text to fit in :attr:`width` characters. diff --git a/Lib/test/test_textwrap.py b/Lib/test/test_textwrap.py --- a/Lib/test/test_textwrap.py +++ b/Lib/test/test_textwrap.py @@ -786,11 +786,11 @@ # Simple case: just words, spaces, and a bit of punctuation text = "Hello there, how are you this fine day? I'm glad to hear it!" - self.check_shorten(text, 18, "Hello there, (...)") + self.check_shorten(text, 18, "Hello there, [...]") self.check_shorten(text, len(text), text) self.check_shorten(text, len(text) - 1, "Hello there, how are you this fine day? " - "I'm glad to (...)") + "I'm glad to [...]") def test_placeholder(self): text = "Hello there, how are you this fine day? I'm glad to hear it!" @@ -816,13 +816,13 @@ "breaks and tabs too.") self.check_shorten(text, 61, "This is a paragraph that already has line " - "breaks and (...)") + "breaks and [...]") self.check_shorten("hello world! ", 12, "hello world!") - self.check_shorten("hello world! ", 11, "hello (...)") + self.check_shorten("hello world! ", 11, "hello [...]") # The leading space is trimmed from the placeholder # (it would be ugly otherwise). - self.check_shorten("hello world! ", 10, "(...)") + self.check_shorten("hello world! ", 10, "[...]") def test_width_too_small_for_placeholder(self): wrapper = TextWrapper(width=8) @@ -831,7 +831,7 @@ wrapper.shorten("x" * 20, placeholder="(.......)") def test_first_word_too_long_but_placeholder_fits(self): - self.check_shorten("Helloo", 5, "(...)") + self.check_shorten("Helloo", 5, "[...]") if __name__ == '__main__': diff --git a/Lib/textwrap.py b/Lib/textwrap.py --- a/Lib/textwrap.py +++ b/Lib/textwrap.py @@ -19,7 +19,7 @@ # since 0xa0 is not in range(128). _whitespace = '\t\n\x0b\x0c\r ' -_default_placeholder = ' (...)' +_default_placeholder = ' [...]' class TextWrapper: """ @@ -376,7 +376,7 @@ >>> textwrap.shorten("Hello world!", width=12) 'Hello world!' 
>>> textwrap.shorten("Hello world!", width=11) - 'Hello (...)' + 'Hello [...]' """ w = TextWrapper(width=width, **kwargs) return w.shorten(text, placeholder=placeholder) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 23:13:16 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Fri, 16 Aug 2013 23:13:16 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzQz?= =?utf-8?q?=3A_Fix_references_to_non-existant_=22StringIO=22_module=2E?= Message-ID: <3cGy1S5nMkz7LjN@mail.python.org> http://hg.python.org/cpython/rev/b23ad357c214 changeset: 85204:b23ad357c214 branch: 3.3 parent: 85198:193bcc12575d user: Serhiy Storchaka date: Sat Aug 17 00:09:55 2013 +0300 summary: Issue #18743: Fix references to non-existant "StringIO" module. files: Doc/c-api/typeobj.rst | 2 +- Doc/library/gzip.rst | 2 +- Doc/library/mailbox.rst | 4 ++-- Doc/library/tempfile.rst | 2 +- Doc/library/unittest.mock.rst | 6 +++--- Doc/library/xml.dom.minidom.rst | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -192,7 +192,7 @@ An optional pointer to the instance print function. The print function is only called when the instance is printed to a *real* file; - when it is printed to a pseudo-file (like a :class:`StringIO` instance), the + when it is printed to a pseudo-file (like a :class:`io.StringIO` instance), the instance's :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` function is called to convert it to a string. These are also called when the type's :c:member:`~PyTypeObject.tp_print` field is *NULL*. A type should never implement :c:member:`~PyTypeObject.tp_print` in a way that produces diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst --- a/Doc/library/gzip.rst +++ b/Doc/library/gzip.rst @@ -62,7 +62,7 @@ value. The new class instance is based on *fileobj*, which can be a regular file, a - :class:`StringIO` object, or any other object which simulates a file. It + :class:`io.BytesIO` object, or any other object which simulates a file. It defaults to ``None``, in which case *filename* is opened to provide a file object. diff --git a/Doc/library/mailbox.rst b/Doc/library/mailbox.rst --- a/Doc/library/mailbox.rst +++ b/Doc/library/mailbox.rst @@ -674,8 +674,8 @@ In Babyl mailboxes, the headers of a message are not stored contiguously with the body of the message. To generate a file-like representation, the - headers and body are copied together into a :class:`StringIO` instance - (from the :mod:`StringIO` module), which has an API identical to that of a + headers and body are copied together into a :class:`io.BytesIO` instance, + which has an API identical to that of a file. As a result, the file-like object is truly independent of the underlying mailbox but does not save memory compared to a string representation. diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst --- a/Doc/library/tempfile.rst +++ b/Doc/library/tempfile.rst @@ -82,7 +82,7 @@ causes the file to roll over to an on-disk file regardless of its size. The returned object is a file-like object whose :attr:`_file` attribute - is either a :class:`BytesIO` or :class:`StringIO` object (depending on + is either a :class:`io.BytesIO` or :class:`io.StringIO` object (depending on whether binary or text *mode* was specified) or a true file object, depending on whether :func:`rollover` has been called. 
This file-like object can be used in a :keyword:`with` statement, just like diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -1084,9 +1084,9 @@ ... TypeError: 'NonCallableMock' object is not callable -Another use case might be to replace an object with a `StringIO` instance: - - >>> from StringIO import StringIO +Another use case might be to replace an object with a `io.StringIO` instance: + + >>> from io import StringIO >>> def foo(): ... print 'Something' ... diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst --- a/Doc/library/xml.dom.minidom.rst +++ b/Doc/library/xml.dom.minidom.rst @@ -55,7 +55,7 @@ .. function:: parseString(string, parser=None) Return a :class:`Document` that represents the *string*. This method creates a - :class:`StringIO` object for the string and passes that on to :func:`parse`. + :class:`io.StringIO` object for the string and passes that on to :func:`parse`. Both functions return a :class:`Document` object representing the content of the document. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 23:13:18 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Fri, 16 Aug 2013 23:13:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318743=3A_Fix_references_to_non-existant_=22Stri?= =?utf-8?q?ngIO=22_module=2E?= Message-ID: <3cGy1V1nDXz7LjV@mail.python.org> http://hg.python.org/cpython/rev/3acbd7aca856 changeset: 85205:3acbd7aca856 parent: 85203:be5481bf4c57 parent: 85204:b23ad357c214 user: Serhiy Storchaka date: Sat Aug 17 00:11:54 2013 +0300 summary: Issue #18743: Fix references to non-existant "StringIO" module. files: Doc/c-api/typeobj.rst | 2 +- Doc/library/gzip.rst | 2 +- Doc/library/mailbox.rst | 4 ++-- Doc/library/tempfile.rst | 2 +- Doc/library/unittest.mock.rst | 6 +++--- Doc/library/xml.dom.minidom.rst | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -192,7 +192,7 @@ An optional pointer to the instance print function. The print function is only called when the instance is printed to a *real* file; - when it is printed to a pseudo-file (like a :class:`StringIO` instance), the + when it is printed to a pseudo-file (like a :class:`io.StringIO` instance), the instance's :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` function is called to convert it to a string. These are also called when the type's :c:member:`~PyTypeObject.tp_print` field is *NULL*. A type should never implement :c:member:`~PyTypeObject.tp_print` in a way that produces diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst --- a/Doc/library/gzip.rst +++ b/Doc/library/gzip.rst @@ -62,7 +62,7 @@ value. The new class instance is based on *fileobj*, which can be a regular file, a - :class:`StringIO` object, or any other object which simulates a file. It + :class:`io.BytesIO` object, or any other object which simulates a file. It defaults to ``None``, in which case *filename* is opened to provide a file object. diff --git a/Doc/library/mailbox.rst b/Doc/library/mailbox.rst --- a/Doc/library/mailbox.rst +++ b/Doc/library/mailbox.rst @@ -674,8 +674,8 @@ In Babyl mailboxes, the headers of a message are not stored contiguously with the body of the message. 
To generate a file-like representation, the - headers and body are copied together into a :class:`StringIO` instance - (from the :mod:`StringIO` module), which has an API identical to that of a + headers and body are copied together into a :class:`io.BytesIO` instance, + which has an API identical to that of a file. As a result, the file-like object is truly independent of the underlying mailbox but does not save memory compared to a string representation. diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst --- a/Doc/library/tempfile.rst +++ b/Doc/library/tempfile.rst @@ -82,7 +82,7 @@ causes the file to roll over to an on-disk file regardless of its size. The returned object is a file-like object whose :attr:`_file` attribute - is either a :class:`BytesIO` or :class:`StringIO` object (depending on + is either a :class:`io.BytesIO` or :class:`io.StringIO` object (depending on whether binary or text *mode* was specified) or a true file object, depending on whether :func:`rollover` has been called. This file-like object can be used in a :keyword:`with` statement, just like diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -1104,9 +1104,9 @@ ... TypeError: 'NonCallableMock' object is not callable -Another use case might be to replace an object with a `StringIO` instance: - - >>> from StringIO import StringIO +Another use case might be to replace an object with a `io.StringIO` instance: + + >>> from io import StringIO >>> def foo(): ... print 'Something' ... diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst --- a/Doc/library/xml.dom.minidom.rst +++ b/Doc/library/xml.dom.minidom.rst @@ -55,7 +55,7 @@ .. function:: parseString(string, parser=None) Return a :class:`Document` that represents the *string*. This method creates a - :class:`StringIO` object for the string and passes that on to :func:`parse`. + :class:`io.StringIO` object for the string and passes that on to :func:`parse`. Both functions return a :class:`Document` object representing the content of the document. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 23:22:17 2013 From: python-checkins at python.org (charles-francois.natali) Date: Fri, 16 Aug 2013 23:22:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE2NDYz?= =?utf-8?q?=3A_Fix_a_transient_test=5Ftimeout_failure=2E?= Message-ID: <3cGyCs1Xlzz7LjN@mail.python.org> http://hg.python.org/cpython/rev/5880c7d33e13 changeset: 85206:5880c7d33e13 branch: 3.3 parent: 85204:b23ad357c214 user: Charles-Fran?ois Natali date: Fri Aug 16 23:19:22 2013 +0200 summary: Issue #16463: Fix a transient test_timeout failure. files: Lib/test/test_timeout.py | 19 ++++++++++++++++--- 1 files changed, 16 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py --- a/Lib/test/test_timeout.py +++ b/Lib/test/test_timeout.py @@ -1,5 +1,6 @@ """Unit tests for socket timeout feature.""" +import functools import unittest from test import support @@ -11,6 +12,18 @@ import socket + at functools.lru_cache() +def resolve_address(host, port): + """Resolve an (host, port) to an address. + + We must perform name resolution before timeout tests, otherwise it will be + performed by connect(). 
+ """ + with support.transient_internet(host): + return socket.getaddrinfo(host, port, socket.AF_INET, + socket.SOCK_STREAM)[0][4] + + class CreationTestCase(unittest.TestCase): """Test case for socket.gettimeout() and socket.settimeout()""" @@ -132,7 +145,7 @@ def setUp(self): self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.addr_remote = ('www.python.org.', 80) + self.addr_remote = resolve_address('www.python.org.', 80) def tearDown(self): self.sock.close() @@ -142,7 +155,7 @@ # to a host that silently drops our packets. We can't simulate this # from Python because it's a function of the underlying TCP/IP stack. # So, the following Snakebite host has been defined: - blackhole = ('blackhole.snakebite.net', 56666) + blackhole = resolve_address('blackhole.snakebite.net', 56666) # Blackhole has been configured to silently drop any incoming packets. # No RSTs (for TCP) or ICMP UNREACH (for UDP/ICMP) will be sent back @@ -154,7 +167,7 @@ # to firewalling or general network configuration. In order to improve # our confidence in testing the blackhole, a corresponding 'whitehole' # has also been set up using one port higher: - whitehole = ('whitehole.snakebite.net', 56667) + whitehole = resolve_address('whitehole.snakebite.net', 56667) # This address has been configured to immediately drop any incoming # packets as well, but it does it respectfully with regards to the -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 23:22:18 2013 From: python-checkins at python.org (charles-francois.natali) Date: Fri, 16 Aug 2013 23:22:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2316463=3A_Fix_a_transient_test=5Ftimeout_failure?= =?utf-8?q?=2E?= Message-ID: <3cGyCt3PvGz7LjV@mail.python.org> http://hg.python.org/cpython/rev/5d4fe1da2c90 changeset: 85207:5d4fe1da2c90 parent: 85205:3acbd7aca856 parent: 85206:5880c7d33e13 user: Charles-Fran?ois Natali date: Fri Aug 16 23:19:56 2013 +0200 summary: Issue #16463: Fix a transient test_timeout failure. files: Lib/test/test_timeout.py | 19 ++++++++++++++++--- 1 files changed, 16 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py --- a/Lib/test/test_timeout.py +++ b/Lib/test/test_timeout.py @@ -1,5 +1,6 @@ """Unit tests for socket timeout feature.""" +import functools import unittest from test import support @@ -11,6 +12,18 @@ import socket + at functools.lru_cache() +def resolve_address(host, port): + """Resolve an (host, port) to an address. + + We must perform name resolution before timeout tests, otherwise it will be + performed by connect(). + """ + with support.transient_internet(host): + return socket.getaddrinfo(host, port, socket.AF_INET, + socket.SOCK_STREAM)[0][4] + + class CreationTestCase(unittest.TestCase): """Test case for socket.gettimeout() and socket.settimeout()""" @@ -132,7 +145,7 @@ def setUp(self): self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.addr_remote = ('www.python.org.', 80) + self.addr_remote = resolve_address('www.python.org.', 80) def tearDown(self): self.sock.close() @@ -142,7 +155,7 @@ # to a host that silently drops our packets. We can't simulate this # from Python because it's a function of the underlying TCP/IP stack. 
# So, the following Snakebite host has been defined: - blackhole = ('blackhole.snakebite.net', 56666) + blackhole = resolve_address('blackhole.snakebite.net', 56666) # Blackhole has been configured to silently drop any incoming packets. # No RSTs (for TCP) or ICMP UNREACH (for UDP/ICMP) will be sent back @@ -154,7 +167,7 @@ # to firewalling or general network configuration. In order to improve # our confidence in testing the blackhole, a corresponding 'whitehole' # has also been set up using one port higher: - whitehole = ('whitehole.snakebite.net', 56667) + whitehole = resolve_address('whitehole.snakebite.net', 56667) # This address has been configured to immediately drop any incoming # packets as well, but it does it respectfully with regards to the -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 16 23:50:30 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Fri, 16 Aug 2013 23:50:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318701=3A_Remove_s?= =?utf-8?q?upport_of_old_CPython_versions_=28=3C3=2E0=29_from_C_code=2E?= Message-ID: <3cGyrQ30wHzPNV@mail.python.org> http://hg.python.org/cpython/rev/a04d957c8d07 changeset: 85208:a04d957c8d07 user: Serhiy Storchaka date: Sat Aug 17 00:48:02 2013 +0300 summary: Issue #18701: Remove support of old CPython versions (<3.0) from C code. files: Misc/NEWS | 2 + Modules/_ctypes/_ctypes.c | 24 +---------------- Modules/_sqlite/cache.c | 1 - Modules/_sqlite/connection.c | 1 - Modules/_sqlite/cursor.c | 1 - Modules/_sqlite/prepare_protocol.c | 1 - Modules/_sqlite/row.c | 1 - Modules/_sqlite/statement.c | 1 - Modules/_sre.c | 17 ------------ Objects/stringlib/unicodedefs.h | 5 --- PC/VS9.0/_sqlite3.vcproj | 4 --- PCbuild/_sqlite3.vcxproj | 1 - PCbuild/_sqlite3.vcxproj.filters | 3 -- 13 files changed, 4 insertions(+), 58 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,8 @@ Library ------- +- Issue #18701: Remove support of old CPython versions (<3.0) from C code. + - Issue #18756: Improve error reporting in os.urandom() when the failure is due to something else than /dev/urandom not existing (for example, exhausting the file descriptor limit). 
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -428,13 +428,7 @@ StgDictObject *dict = PyType_stgdict(type); assert (dict); - if (!PyArg_ParseTuple(args, -#if (PY_VERSION_HEX < 0x02050000) - "O|i:from_buffer", -#else - "O|n:from_buffer", -#endif - &obj, &offset)) + if (!PyArg_ParseTuple(args, "O|n:from_buffer", &obj, &offset)) return NULL; if (-1 == PyObject_AsWriteBuffer(obj, &buffer, &buffer_len)) @@ -447,11 +441,7 @@ } if (dict->size > buffer_len - offset) { PyErr_Format(PyExc_ValueError, -#if (PY_VERSION_HEX < 0x02050000) - "Buffer size too small (%d instead of at least %d bytes)", -#else "Buffer size too small (%zd instead of at least %zd bytes)", -#endif buffer_len, dict->size + offset); return NULL; } @@ -484,13 +474,7 @@ StgDictObject *dict = PyType_stgdict(type); assert (dict); - if (!PyArg_ParseTuple(args, -#if (PY_VERSION_HEX < 0x02050000) - "O|i:from_buffer", -#else - "O|n:from_buffer", -#endif - &obj, &offset)) + if (!PyArg_ParseTuple(args, "O|n:from_buffer", &obj, &offset)) return NULL; if (-1 == PyObject_AsReadBuffer(obj, (const void**)&buffer, &buffer_len)) @@ -504,11 +488,7 @@ if (dict->size > buffer_len - offset) { PyErr_Format(PyExc_ValueError, -#if (PY_VERSION_HEX < 0x02050000) - "Buffer size too small (%d instead of at least %d bytes)", -#else "Buffer size too small (%zd instead of at least %zd bytes)", -#endif buffer_len, dict->size + offset); return NULL; } diff --git a/Modules/_sqlite/cache.c b/Modules/_sqlite/cache.c --- a/Modules/_sqlite/cache.c +++ b/Modules/_sqlite/cache.c @@ -21,7 +21,6 @@ * 3. This notice may not be removed or altered from any source distribution. */ -#include "sqlitecompat.h" #include "cache.h" #include diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -29,7 +29,6 @@ #include "cursor.h" #include "prepare_protocol.h" #include "util.h" -#include "sqlitecompat.h" #include "pythread.h" diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -24,7 +24,6 @@ #include "cursor.h" #include "module.h" #include "util.h" -#include "sqlitecompat.h" PyObject* pysqlite_cursor_iternext(pysqlite_Cursor* self); diff --git a/Modules/_sqlite/prepare_protocol.c b/Modules/_sqlite/prepare_protocol.c --- a/Modules/_sqlite/prepare_protocol.c +++ b/Modules/_sqlite/prepare_protocol.c @@ -21,7 +21,6 @@ * 3. This notice may not be removed or altered from any source distribution. 
*/ -#include "sqlitecompat.h" #include "prepare_protocol.h" int pysqlite_prepare_protocol_init(pysqlite_PrepareProtocol* self, PyObject* args, PyObject* kwargs) diff --git a/Modules/_sqlite/row.c b/Modules/_sqlite/row.c --- a/Modules/_sqlite/row.c +++ b/Modules/_sqlite/row.c @@ -23,7 +23,6 @@ #include "row.h" #include "cursor.h" -#include "sqlitecompat.h" void pysqlite_row_dealloc(pysqlite_Row* self) { diff --git a/Modules/_sqlite/statement.c b/Modules/_sqlite/statement.c --- a/Modules/_sqlite/statement.c +++ b/Modules/_sqlite/statement.c @@ -27,7 +27,6 @@ #include "microprotocols.h" #include "prepare_protocol.h" #include "util.h" -#include "sqlitecompat.h" /* prototypes */ static int pysqlite_check_remaining_sql(const char* tail); diff --git a/Modules/_sre.c b/Modules/_sre.c --- a/Modules/_sre.c +++ b/Modules/_sre.c @@ -70,10 +70,6 @@ /* enables copy/deepcopy handling (work in progress) */ #undef USE_BUILTIN_COPY -#if PY_VERSION_HEX < 0x01060000 -#define PyObject_DEL(op) PyMem_DEL((op)) -#endif - /* -------------------------------------------------------------------- */ #if defined(_MSC_VER) @@ -1993,10 +1989,8 @@ /* join list elements */ PyObject* joiner; -#if PY_VERSION_HEX >= 0x01060000 PyObject* function; PyObject* args; -#endif PyObject* result; joiner = PySequence_GetSlice(string, 0, 0); @@ -2008,7 +2002,6 @@ return joiner; } -#if PY_VERSION_HEX >= 0x01060000 function = PyObject_GetAttrString(joiner, "join"); if (!function) { Py_DECREF(joiner); @@ -2024,12 +2017,6 @@ result = PyObject_CallObject(function, args); Py_DECREF(args); /* also removes list */ Py_DECREF(function); -#else - result = call( - "string", "join", - PyTuple_Pack(2, list, joiner) - ); -#endif Py_DECREF(joiner); return result; @@ -2136,7 +2123,6 @@ } -#if PY_VERSION_HEX >= 0x02020000 static PyObject* pattern_finditer(PatternObject* pattern, PyObject* args, PyObject* kw) { @@ -2158,7 +2144,6 @@ return iterator; } -#endif static PyObject* pattern_split(PatternObject* self, PyObject* args, PyObject* kw) @@ -2581,10 +2566,8 @@ pattern_split_doc}, {"findall", (PyCFunction) pattern_findall, METH_VARARGS|METH_KEYWORDS, pattern_findall_doc}, -#if PY_VERSION_HEX >= 0x02020000 {"finditer", (PyCFunction) pattern_finditer, METH_VARARGS|METH_KEYWORDS, pattern_finditer_doc}, -#endif {"scanner", (PyCFunction) pattern_scanner, METH_VARARGS|METH_KEYWORDS}, {"__copy__", (PyCFunction) pattern_copy, METH_NOARGS}, {"__deepcopy__", (PyCFunction) pattern_deepcopy, METH_O}, diff --git a/Objects/stringlib/unicodedefs.h b/Objects/stringlib/unicodedefs.h --- a/Objects/stringlib/unicodedefs.h +++ b/Objects/stringlib/unicodedefs.h @@ -24,13 +24,8 @@ #define STRINGLIB_CHECK PyUnicode_Check #define STRINGLIB_CHECK_EXACT PyUnicode_CheckExact -#if PY_VERSION_HEX < 0x03000000 -#define STRINGLIB_TOSTR PyObject_Unicode -#define STRINGLIB_TOASCII PyObject_Repr -#else #define STRINGLIB_TOSTR PyObject_Str #define STRINGLIB_TOASCII PyObject_ASCII -#endif #define STRINGLIB_WANT_CONTAINS_OBJ 1 diff --git a/PC/VS9.0/_sqlite3.vcproj b/PC/VS9.0/_sqlite3.vcproj --- a/PC/VS9.0/_sqlite3.vcproj +++ b/PC/VS9.0/_sqlite3.vcproj @@ -555,10 +555,6 @@ > - - diff --git a/PCbuild/_sqlite3.vcxproj b/PCbuild/_sqlite3.vcxproj --- a/PCbuild/_sqlite3.vcxproj +++ b/PCbuild/_sqlite3.vcxproj @@ -243,7 +243,6 @@ - diff --git a/PCbuild/_sqlite3.vcxproj.filters b/PCbuild/_sqlite3.vcxproj.filters --- a/PCbuild/_sqlite3.vcxproj.filters +++ b/PCbuild/_sqlite3.vcxproj.filters @@ -30,9 +30,6 @@ Header Files - - Header Files - Header Files -- Repository URL: http://hg.python.org/cpython 
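
For context, the guards deleted by #18701 (and the sqlitecompat.h header removed in the next changeset) all follow one stock pattern: compile-time switches on PY_VERSION_HEX. The following is a condensed sketch of that pattern, pieced together from the hunks above; the function name example_from_buffer() is hypothetical and stands in for the real _ctypes code, so treat it as an illustration rather than actual repository source:

    #include "Python.h"

    /* Pre-2.5 interpreters had no Py_ssize_t and no "n" format code for
     * PyArg_ParseTuple(), so extensions guarded both with PY_VERSION_HEX. */
    #if PY_VERSION_HEX < 0x02050000
    typedef int Py_ssize_t;
    #define FROM_BUFFER_FMT "O|i:from_buffer"   /* offset parsed as a plain int */
    #else
    #define FROM_BUFFER_FMT "O|n:from_buffer"   /* offset parsed as Py_ssize_t */
    #endif

    static PyObject *
    example_from_buffer(PyObject *self, PyObject *args)  /* hypothetical name */
    {
        PyObject *obj;
        Py_ssize_t offset = 0;

        if (!PyArg_ParseTuple(args, FROM_BUFFER_FMT, &obj, &offset))
            return NULL;
        /* ... use obj and offset here ... */
        Py_RETURN_NONE;
    }

Since every interpreter that can build this code is now 3.x (PY_VERSION_HEX >= 0x03000000), the older branches are dead, which is why the commits above simply keep the modern form and drop the #if guards and the compatibility header outright.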
From python-checkins at python.org Sat Aug 17 00:04:10 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Sat, 17 Aug 2013 00:04:10 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318701=3A_Remove_s?= =?utf-8?q?qlitecompat=2Eh=2E?= Message-ID: <3cGz8B3k6dzRlg@mail.python.org> http://hg.python.org/cpython/rev/2150c7d6426c changeset: 85209:2150c7d6426c user: Serhiy Storchaka date: Sat Aug 17 01:01:23 2013 +0300 summary: Issue #18701: Remove sqlitecompat.h. files: Modules/_sqlite/sqlitecompat.h | 63 ---------------------- 1 files changed, 0 insertions(+), 63 deletions(-) diff --git a/Modules/_sqlite/sqlitecompat.h b/Modules/_sqlite/sqlitecompat.h deleted file mode 100644 --- a/Modules/_sqlite/sqlitecompat.h +++ /dev/null @@ -1,63 +0,0 @@ -/* sqlitecompat.h - compatibility macros - * - * Copyright (C) 2006-2010 Gerhard H?ring - * - * This file is part of pysqlite. - * - * This software is provided 'as-is', without any express or implied - * warranty. In no event will the authors be held liable for any damages - * arising from the use of this software. - * - * Permission is granted to anyone to use this software for any purpose, - * including commercial applications, and to alter it and redistribute it - * freely, subject to the following restrictions: - * - * 1. The origin of this software must not be misrepresented; you must not - * claim that you wrote the original software. If you use this software - * in a product, an acknowledgment in the product documentation would be - * appreciated but is not required. - * 2. Altered source versions must be plainly marked as such, and must not be - * misrepresented as being the original software. - * 3. This notice may not be removed or altered from any source distribution. - */ - -#include "Python.h" - -#ifndef PYSQLITE_COMPAT_H -#define PYSQLITE_COMPAT_H - -/* define Py_ssize_t for pre-2.5 versions of Python */ - -#if PY_VERSION_HEX < 0x02050000 -typedef int Py_ssize_t; -typedef int (*lenfunc)(PyObject*); -#endif - - -/* define PyDict_CheckExact for pre-2.4 versions of Python */ -#ifndef PyDict_CheckExact -#define PyDict_CheckExact(op) ((op)->ob_type == &PyDict_Type) -#endif - -/* define Py_CLEAR for pre-2.4 versions of Python */ -#ifndef Py_CLEAR -#define Py_CLEAR(op) \ - do { \ - if (op) { \ - PyObject *tmp = (PyObject *)(op); \ - (op) = NULL; \ - Py_DECREF(tmp); \ - } \ - } while (0) -#endif - -#ifndef PyVarObject_HEAD_INIT -#define PyVarObject_HEAD_INIT(type, size) \ - PyObject_HEAD_INIT(type) size, -#endif - -#ifndef Py_TYPE -#define Py_TYPE(ob) ((ob)->ob_type) -#endif - -#endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 00:18:06 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 00:18:06 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Remove_last_bit_of_GNU_pth?= =?utf-8?q?_thread?= Message-ID: <3cGzSG0Q4pzR65@mail.python.org> http://hg.python.org/cpython/rev/1d5f644b9241 changeset: 85210:1d5f644b9241 user: Christian Heimes date: Sat Aug 17 00:17:58 2013 +0200 summary: Remove last bit of GNU pth thread PEP 11: Name: Systems using --with-pth (GNU pth threads) Unsupported in: Python 3.2 Code removed in: Python 3.3 files: Python/thread_pth.h | 177 -------------------------------- 1 files changed, 0 insertions(+), 177 deletions(-) diff --git a/Python/thread_pth.h b/Python/thread_pth.h deleted file mode 100644 --- a/Python/thread_pth.h +++ /dev/null @@ -1,177 +0,0 @@ - -/* GNU pth threads interface - 
http://www.gnu.org/software/pth - 2000-05-03 Andy Dustman - - Adapted from Posix threads interface - 12 May 1997 -- david arnold - */ - -#include -#include -#include - -/* A pth mutex isn't sufficient to model the Python lock type - * because pth mutexes can be acquired multiple times by the - * same thread. - * - * The pth_lock struct implements a Python lock as a "locked?" bit - * and a pair. In general, if the bit can be acquired - * instantly, it is, else the pair is used to block the thread until the - * bit is cleared. - */ - -typedef struct { - char locked; /* 0=unlocked, 1=locked */ - /* a pair to handle an acquire of a locked lock */ - pth_cond_t lock_released; - pth_mutex_t mut; -} pth_lock; - -#define CHECK_STATUS(name) if (status == -1) { printf("%d ", status); perror(name); error = 1; } - -pth_attr_t PyThread_attr; - -/* - * Initialization. - */ - -static void PyThread__init_thread(void) -{ - pth_init(); - PyThread_attr = pth_attr_new(); - pth_attr_set(PyThread_attr, PTH_ATTR_STACK_SIZE, 1<<18); - pth_attr_set(PyThread_attr, PTH_ATTR_JOINABLE, FALSE); -} - -/* - * Thread support. - */ - - -long PyThread_start_new_thread(void (*func)(void *), void *arg) -{ - pth_t th; - dprintf(("PyThread_start_new_thread called\n")); - if (!initialized) - PyThread_init_thread(); - - th = pth_spawn(PyThread_attr, - (void* (*)(void *))func, - (void *)arg - ); - - return th; -} - -long PyThread_get_thread_ident(void) -{ - volatile pth_t threadid; - if (!initialized) - PyThread_init_thread(); - threadid = pth_self(); - return (long) threadid; -} - -void PyThread_exit_thread(void) -{ - dprintf(("PyThread_exit_thread called\n")); - if (!initialized) { - exit(0); - } -} - -/* - * Lock support. - */ -PyThread_type_lock PyThread_allocate_lock(void) -{ - pth_lock *lock; - int status, error = 0; - - dprintf(("PyThread_allocate_lock called\n")); - if (!initialized) - PyThread_init_thread(); - - lock = (pth_lock *) malloc(sizeof(pth_lock)); - memset((void *)lock, '\0', sizeof(pth_lock)); - if (lock) { - lock->locked = 0; - status = pth_mutex_init(&lock->mut); - CHECK_STATUS("pth_mutex_init"); - status = pth_cond_init(&lock->lock_released); - CHECK_STATUS("pth_cond_init"); - if (error) { - free((void *)lock); - lock = NULL; - } - } - dprintf(("PyThread_allocate_lock() -> %p\n", lock)); - return (PyThread_type_lock) lock; -} - -void PyThread_free_lock(PyThread_type_lock lock) -{ - pth_lock *thelock = (pth_lock *)lock; - - dprintf(("PyThread_free_lock(%p) called\n", lock)); - - free((void *)thelock); -} - -int PyThread_acquire_lock(PyThread_type_lock lock, int waitflag) -{ - int success; - pth_lock *thelock = (pth_lock *)lock; - int status, error = 0; - - dprintf(("PyThread_acquire_lock(%p, %d) called\n", lock, waitflag)); - - status = pth_mutex_acquire(&thelock->mut, !waitflag, NULL); - CHECK_STATUS("pth_mutex_acquire[1]"); - success = thelock->locked == 0; - if (success) thelock->locked = 1; - status = pth_mutex_release( &thelock->mut ); - CHECK_STATUS("pth_mutex_release[1]"); - - if ( !success && waitflag ) { - /* continue trying until we get the lock */ - - /* mut must be locked by me -- part of the condition - * protocol */ - status = pth_mutex_acquire( &thelock->mut, !waitflag, NULL ); - CHECK_STATUS("pth_mutex_acquire[2]"); - while ( thelock->locked ) { - status = pth_cond_await(&thelock->lock_released, - &thelock->mut, NULL); - CHECK_STATUS("pth_cond_await"); - } - thelock->locked = 1; - status = pth_mutex_release( &thelock->mut ); - CHECK_STATUS("pth_mutex_release[2]"); - success = 1; - } - if 
(error) success = 0; - dprintf(("PyThread_acquire_lock(%p, %d) -> %d\n", lock, waitflag, success)); - return success; -} - -void PyThread_release_lock(PyThread_type_lock lock) -{ - pth_lock *thelock = (pth_lock *)lock; - int status, error = 0; - - dprintf(("PyThread_release_lock(%p) called\n", lock)); - - status = pth_mutex_acquire( &thelock->mut, 0, NULL ); - CHECK_STATUS("pth_mutex_acquire[3]"); - - thelock->locked = 0; - - status = pth_mutex_release( &thelock->mut ); - CHECK_STATUS("pth_mutex_release[3]"); - - /* wake up someone (anyone, if any) waiting on the lock */ - status = pth_cond_notify( &thelock->lock_released, 0 ); - CHECK_STATUS("pth_cond_notify"); -} -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 01:11:13 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 01:11:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzA5?= =?utf-8?q?=3A_Fix_CVE-2013-4238=2E_The_SSL_module_now_handles_NULL_bytes?= Message-ID: <3cH0dY2X9bz7LjV@mail.python.org> http://hg.python.org/cpython/rev/c9f073e593b0 changeset: 85211:c9f073e593b0 branch: 3.3 parent: 85186:3105b78d3434 user: Christian Heimes date: Sat Aug 17 00:54:47 2013 +0200 summary: Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes inside subjectAltName correctly. Formerly the module has used OpenSSL's GENERAL_NAME_print() function to get the string represention of ASN.1 strings for rfc822Name (email), dNSName (DNS) and uniformResourceIdentifier (URI). files: Lib/test/nullbytecert.pem | 90 +++++++++++++++++++++++++++ Lib/test/test_ssl.py | 29 ++++++++ Misc/NEWS | 6 + Modules/_ssl.c | 64 +++++++++++++++++- 4 files changed, 184 insertions(+), 5 deletions(-) diff --git a/Lib/test/nullbytecert.pem b/Lib/test/nullbytecert.pem new file mode 100644 --- /dev/null +++ b/Lib/test/nullbytecert.pem @@ -0,0 +1,90 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 0 (0x0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev at python.org + Validity + Not Before: Aug 7 13:11:52 2013 GMT + Not After : Aug 7 13:12:52 2013 GMT + Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev at python.org + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: + 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: + 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: + 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: + 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: + 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: + a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: + 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: + ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: + 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: + 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: + 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: + f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: + f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: + ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: + d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: + 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: + 2f:85 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: critical + CA:FALSE + X509v3 Subject Key Identifier: + 
88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C + X509v3 Key Usage: + Digital Signature, Non Repudiation, Key Encipherment + X509v3 Subject Alternative Name: + ************************************************************* + WARNING: The values for DNS, email and URI are WRONG. OpenSSL + doesn't print the text after a NULL byte. + ************************************************************* + DNS:altnull.python.org, email:null at python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 + Signature Algorithm: sha1WithRSAEncryption + ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: + a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: + 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: + 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: + 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: + de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: + 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: + 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: + d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: + 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: + 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: + 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: + 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: + 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: + c1:ca:a9:94 +-----BEGIN CERTIFICATE----- +MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx +DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ +eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg +RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y +ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw +NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI +DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv +ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt +ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq +hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j +pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P +vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv +KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA +oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL +08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV +HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E +BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu +Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 +bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA +AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 +i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j +HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk +kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx +VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW +RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= +-----END CERTIFICATE----- diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -55,6 +55,7 @@ WRONGCERT = data_file("XXXnonexisting.pem") BADKEY = data_file("badkey.pem") NOKIACERT = data_file("nokia.pem") +NULLBYTECERT = data_file("nullbytecert.pem") DHFILE = data_file("dh512.pem") BYTES_DHFILE = os.fsencode(DHFILE) @@ -162,6 +163,27 @@ ('DNS', 
'projects.forum.nokia.com')) ) + def test_parse_cert_CVE_2013_4238(self): + p = ssl._ssl._test_decode_cert(NULLBYTECERT) + if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + subject = ((('countryName', 'US'),), + (('stateOrProvinceName', 'Oregon'),), + (('localityName', 'Beaverton'),), + (('organizationName', 'Python Software Foundation'),), + (('organizationalUnitName', 'Python Core Development'),), + (('commonName', 'null.python.org\x00example.org'),), + (('emailAddress', 'python-dev at python.org'),)) + self.assertEqual(p['subject'], subject) + self.assertEqual(p['issuer'], subject) + self.assertEqual(p['subjectAltName'], + (('DNS', 'altnull.python.org\x00example.com'), + ('email', 'null at python.org\x00user at example.org'), + ('URI', 'http://null.python.org\x00http://example.org'), + ('IP Address', '192.0.2.1'), + ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) + ) + def test_DER_to_PEM(self): with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: pem = f.read() @@ -294,6 +316,13 @@ fail(cert, 'foo.a.com') fail(cert, 'bar.foo.com') + # NULL bytes are bad, CVE-2013-4073 + cert = {'subject': ((('commonName', + 'null.python.org\x00example.org'),),)} + ok(cert, 'null.python.org\x00example.org') # or raise an error? + fail(cert, 'example.org') + fail(cert, 'null.python.org') + # Slightly fake real-world example cert = {'notAfter': 'Jun 26 21:41:46 2011 GMT', 'subject': ((('commonName', 'linuxfrz.org'),),), diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,12 @@ Library ------- +- Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes + inside subjectAltName correctly. Formerly the module has used OpenSSL's + GENERAL_NAME_print() function to get the string represention of ASN.1 + strings for ``rfc822Name`` (email), ``dNSName`` (DNS) and + ``uniformResourceIdentifier`` (URI). + - Issue #18405: Improve the entropy of crypt.mksalt(). 
- Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -771,12 +771,14 @@ ext->value->length)); for(j = 0; j < sk_GENERAL_NAME_num(names); j++) { - /* get a rendering of each name in the set of names */ + int gntype; + ASN1_STRING *as = NULL; name = sk_GENERAL_NAME_value(names, j); - if (name->type == GEN_DIRNAME) { - + gntype = name-> type; + switch (gntype) { + case GEN_DIRNAME: /* we special-case DirName as a tuple of tuples of attributes */ @@ -798,11 +800,62 @@ goto fail; } PyTuple_SET_ITEM(t, 1, v); + break; - } else { + case GEN_EMAIL: + case GEN_DNS: + case GEN_URI: + /* GENERAL_NAME_print() doesn't handle NULL bytes in ASN1_string + correctly, CVE-2013-4238 */ + t = PyTuple_New(2); + if (t == NULL) + goto fail; + switch (gntype) { + case GEN_EMAIL: + v = PyUnicode_FromString("email"); + as = name->d.rfc822Name; + break; + case GEN_DNS: + v = PyUnicode_FromString("DNS"); + as = name->d.dNSName; + break; + case GEN_URI: + v = PyUnicode_FromString("URI"); + as = name->d.uniformResourceIdentifier; + break; + } + if (v == NULL) { + Py_DECREF(t); + goto fail; + } + PyTuple_SET_ITEM(t, 0, v); + v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_data(as), + ASN1_STRING_length(as)); + if (v == NULL) { + Py_DECREF(t); + goto fail; + } + PyTuple_SET_ITEM(t, 1, v); + break; + default: /* for everything else, we use the OpenSSL print form */ - + switch (gntype) { + /* check for new general name type */ + case GEN_OTHERNAME: + case GEN_X400: + case GEN_EDIPARTY: + case GEN_IPADD: + case GEN_RID: + break; + default: + if (PyErr_WarnFormat(PyExc_RuntimeWarning, 1, + "Unknown general name type %d", + gntype) == -1) { + goto fail; + } + break; + } (void) BIO_reset(biobuf); GENERAL_NAME_print(biobuf, name); len = BIO_gets(biobuf, buf, sizeof(buf)-1); @@ -829,6 +882,7 @@ goto fail; } PyTuple_SET_ITEM(t, 1, v); + break; } /* and add that rendering to the list */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 01:11:14 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 01:11:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMy4zIC0+IDMuMyk6?= =?utf-8?q?_merge?= Message-ID: <3cH0dZ5p8Wz7LjW@mail.python.org> http://hg.python.org/cpython/rev/51ebb3780a17 changeset: 85212:51ebb3780a17 branch: 3.3 parent: 85211:c9f073e593b0 parent: 85206:5880c7d33e13 user: Christian Heimes date: Sat Aug 17 00:55:39 2013 +0200 summary: merge files: Doc/c-api/typeobj.rst | 2 +- Doc/library/gzip.rst | 2 +- Doc/library/mailbox.rst | 4 +- Doc/library/random.rst | 6 ++-- Doc/library/tempfile.rst | 2 +- Doc/library/unittest.mock.rst | 6 ++-- Doc/library/xml.dom.minidom.rst | 2 +- Lib/test/test_os.py | 20 ++++++++++++++++ Lib/test/test_shutil.py | 26 +++++++++++++++++++++ Lib/test/test_timeout.py | 19 ++++++++++++-- Misc/NEWS | 7 +++++ Python/random.c | 8 ++++- README | 7 +++++ 13 files changed, 94 insertions(+), 17 deletions(-) diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -192,7 +192,7 @@ An optional pointer to the instance print function. 
The print function is only called when the instance is printed to a *real* file; - when it is printed to a pseudo-file (like a :class:`StringIO` instance), the + when it is printed to a pseudo-file (like a :class:`io.StringIO` instance), the instance's :c:member:`~PyTypeObject.tp_repr` or :c:member:`~PyTypeObject.tp_str` function is called to convert it to a string. These are also called when the type's :c:member:`~PyTypeObject.tp_print` field is *NULL*. A type should never implement :c:member:`~PyTypeObject.tp_print` in a way that produces diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst --- a/Doc/library/gzip.rst +++ b/Doc/library/gzip.rst @@ -62,7 +62,7 @@ value. The new class instance is based on *fileobj*, which can be a regular file, a - :class:`StringIO` object, or any other object which simulates a file. It + :class:`io.BytesIO` object, or any other object which simulates a file. It defaults to ``None``, in which case *filename* is opened to provide a file object. diff --git a/Doc/library/mailbox.rst b/Doc/library/mailbox.rst --- a/Doc/library/mailbox.rst +++ b/Doc/library/mailbox.rst @@ -674,8 +674,8 @@ In Babyl mailboxes, the headers of a message are not stored contiguously with the body of the message. To generate a file-like representation, the - headers and body are copied together into a :class:`StringIO` instance - (from the :mod:`StringIO` module), which has an API identical to that of a + headers and body are copied together into a :class:`io.BytesIO` instance, + which has an API identical to that of a file. As a result, the file-like object is truly independent of the underlying mailbox but does not save memory compared to a string representation. diff --git a/Doc/library/random.rst b/Doc/library/random.rst --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -45,9 +45,9 @@ .. warning:: - The generators of the :mod:`random` module should not be used for security - purposes. Use :func:`ssl.RAND_bytes` if you require a cryptographically - secure pseudorandom number generator. + The pseudo-random generators of this module should not be used for + security purposes. Use :func:`os.urandom` or :class:`SystemRandom` if + you require a cryptographically secure pseudo-random number generator. Bookkeeping functions: diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst --- a/Doc/library/tempfile.rst +++ b/Doc/library/tempfile.rst @@ -82,7 +82,7 @@ causes the file to roll over to an on-disk file regardless of its size. The returned object is a file-like object whose :attr:`_file` attribute - is either a :class:`BytesIO` or :class:`StringIO` object (depending on + is either a :class:`io.BytesIO` or :class:`io.StringIO` object (depending on whether binary or text *mode* was specified) or a true file object, depending on whether :func:`rollover` has been called. This file-like object can be used in a :keyword:`with` statement, just like diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -1084,9 +1084,9 @@ ... TypeError: 'NonCallableMock' object is not callable -Another use case might be to replace an object with a `StringIO` instance: - - >>> from StringIO import StringIO +Another use case might be to replace an object with a `io.StringIO` instance: + + >>> from io import StringIO >>> def foo(): ... print 'Something' ... 
diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst --- a/Doc/library/xml.dom.minidom.rst +++ b/Doc/library/xml.dom.minidom.rst @@ -55,7 +55,7 @@ .. function:: parseString(string, parser=None) Return a :class:`Document` that represents the *string*. This method creates a - :class:`StringIO` object for the string and passes that on to :func:`parse`. + :class:`io.StringIO` object for the string and passes that on to :func:`parse`. Both functions return a :class:`Document` object representing the content of the document. diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -28,6 +28,11 @@ import threading except ImportError: threading = None +try: + import resource +except ImportError: + resource = None + from test.script_helper import assert_python_ok with warnings.catch_warnings(): @@ -997,6 +1002,21 @@ data2 = self.get_urandom_subprocess(16) self.assertNotEqual(data1, data2) + @unittest.skipUnless(resource, "test requires the resource module") + def test_urandom_failure(self): + soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE) + resource.setrlimit(resource.RLIMIT_NOFILE, (1, hard_limit)) + try: + with self.assertRaises(OSError) as cm: + os.urandom(16) + self.assertEqual(cm.exception.errno, errno.EMFILE) + finally: + # We restore the old limit as soon as possible. If doing it + # using addCleanup(), code running in between would fail + # creating any file descriptor. + resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit)) + + @contextlib.contextmanager def _execvpe_mockup(defpath=None): """ diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -726,6 +726,32 @@ shutil.rmtree(src_dir) shutil.rmtree(os.path.dirname(dst_dir)) + def test_copytree_retains_permissions(self): + tmp_dir = tempfile.mkdtemp() + src_dir = os.path.join(tmp_dir, 'source') + os.mkdir(src_dir) + dst_dir = os.path.join(tmp_dir, 'destination') + self.addCleanup(shutil.rmtree, tmp_dir) + + os.chmod(src_dir, 0o777) + write_file((src_dir, 'permissive.txt'), '123') + os.chmod(os.path.join(src_dir, 'permissive.txt'), 0o777) + write_file((src_dir, 'restrictive.txt'), '456') + os.chmod(os.path.join(src_dir, 'restrictive.txt'), 0o600) + restrictive_subdir = tempfile.mkdtemp(dir=src_dir) + os.chmod(restrictive_subdir, 0o600) + + shutil.copytree(src_dir, dst_dir) + self.assertEquals(os.stat(src_dir).st_mode, os.stat(dst_dir).st_mode) + self.assertEquals(os.stat(os.path.join(src_dir, 'permissive.txt')).st_mode, + os.stat(os.path.join(dst_dir, 'permissive.txt')).st_mode) + self.assertEquals(os.stat(os.path.join(src_dir, 'restrictive.txt')).st_mode, + os.stat(os.path.join(dst_dir, 'restrictive.txt')).st_mode) + restrictive_subdir_dst = os.path.join(dst_dir, + os.path.split(restrictive_subdir)[1]) + self.assertEquals(os.stat(restrictive_subdir).st_mode, + os.stat(restrictive_subdir_dst).st_mode) + @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link') def test_dont_copy_file_onto_link_to_itself(self): # Temporarily disable test on Windows. diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py --- a/Lib/test/test_timeout.py +++ b/Lib/test/test_timeout.py @@ -1,5 +1,6 @@ """Unit tests for socket timeout feature.""" +import functools import unittest from test import support @@ -11,6 +12,18 @@ import socket + at functools.lru_cache() +def resolve_address(host, port): + """Resolve an (host, port) to an address. 
+ + We must perform name resolution before timeout tests, otherwise it will be + performed by connect(). + """ + with support.transient_internet(host): + return socket.getaddrinfo(host, port, socket.AF_INET, + socket.SOCK_STREAM)[0][4] + + class CreationTestCase(unittest.TestCase): """Test case for socket.gettimeout() and socket.settimeout()""" @@ -132,7 +145,7 @@ def setUp(self): self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.addr_remote = ('www.python.org.', 80) + self.addr_remote = resolve_address('www.python.org.', 80) def tearDown(self): self.sock.close() @@ -142,7 +155,7 @@ # to a host that silently drops our packets. We can't simulate this # from Python because it's a function of the underlying TCP/IP stack. # So, the following Snakebite host has been defined: - blackhole = ('blackhole.snakebite.net', 56666) + blackhole = resolve_address('blackhole.snakebite.net', 56666) # Blackhole has been configured to silently drop any incoming packets. # No RSTs (for TCP) or ICMP UNREACH (for UDP/ICMP) will be sent back @@ -154,7 +167,7 @@ # to firewalling or general network configuration. In order to improve # our confidence in testing the blackhole, a corresponding 'whitehole' # has also been set up using one port higher: - whitehole = ('whitehole.snakebite.net', 56667) + whitehole = resolve_address('whitehole.snakebite.net', 56667) # This address has been configured to immediately drop any incoming # packets as well, but it does it respectfully with regards to the diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -72,6 +72,10 @@ strings for ``rfc822Name`` (email), ``dNSName`` (DNS) and ``uniformResourceIdentifier`` (URI). +- Issue #18756: Improve error reporting in os.urandom() when the failure + is due to something else than /dev/urandom not existing (for example, + exhausting the file descriptor limit). + - Issue #18405: Improve the entropy of crypt.mksalt(). - Issue #18676: Change 'positive' to 'non-negative' in queue.py put and get @@ -264,6 +268,9 @@ Tests ----- +- Issue #1666318: Add a test that shutil.copytree() retains directory + permissions. Patch by Catherine Devlin. + - Issue #18357: add tests for dictview set difference. Patch by Fraser Tweedale. diff --git a/Python/random.c b/Python/random.c --- a/Python/random.c +++ b/Python/random.c @@ -165,8 +165,12 @@ Py_END_ALLOW_THREADS if (fd < 0) { - PyErr_SetString(PyExc_NotImplementedError, - "/dev/urandom (or equivalent) not found"); + if (errno == ENOENT || errno == ENXIO || + errno == ENODEV || errno == EACCES) + PyErr_SetString(PyExc_NotImplementedError, + "/dev/urandom (or equivalent) not found"); + else + PyErr_SetFromErrno(PyExc_OSError); return -1; } diff --git a/README b/README --- a/README +++ b/README @@ -76,6 +76,13 @@ is primarily for documentation authors, translators, and people with special formatting requirements. +If you would like to contribute to the development of Python, relevant +documentation is available at: + + http://docs.python.org/devguide/ + +For information about building Python's documentation, refer to Doc/README.txt. 
+ Converting From Python 2.x to 3.x --------------------------------- -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 01:11:16 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 01:11:16 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318709=3A_Fix_CVE-2013-4238=2E_The_SSL_module_no?= =?utf-8?q?w_handles_NULL_bytes?= Message-ID: <3cH0dc207Yz7Ljr@mail.python.org> http://hg.python.org/cpython/rev/7a0f398d1a5c changeset: 85213:7a0f398d1a5c parent: 85210:1d5f644b9241 parent: 85212:51ebb3780a17 user: Christian Heimes date: Sat Aug 17 00:58:00 2013 +0200 summary: Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes inside subjectAltName correctly. Formerly the module has used OpenSSL's GENERAL_NAME_print() function to get the string represention of ASN.1 strings for rfc822Name (email), dNSName (DNS) and uniformResourceIdentifier (URI). files: Lib/test/nullbytecert.pem | 90 +++++++++++++++++++++++++++ Lib/test/test_ssl.py | 29 ++++++++ Misc/NEWS | 6 + Modules/_ssl.c | 68 ++++++++++++++++++-- 4 files changed, 186 insertions(+), 7 deletions(-) diff --git a/Lib/test/nullbytecert.pem b/Lib/test/nullbytecert.pem new file mode 100644 --- /dev/null +++ b/Lib/test/nullbytecert.pem @@ -0,0 +1,90 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 0 (0x0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev at python.org + Validity + Not Before: Aug 7 13:11:52 2013 GMT + Not After : Aug 7 13:12:52 2013 GMT + Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev at python.org + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: + 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: + 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: + 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: + 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: + 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: + a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: + 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: + ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: + 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: + 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: + 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: + f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: + f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: + ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: + d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: + 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: + 2f:85 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: critical + CA:FALSE + X509v3 Subject Key Identifier: + 88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C + X509v3 Key Usage: + Digital Signature, Non Repudiation, Key Encipherment + X509v3 Subject Alternative Name: + ************************************************************* + WARNING: The values for DNS, email and URI are WRONG. OpenSSL + doesn't print the text after a NULL byte. 
+ ************************************************************* + DNS:altnull.python.org, email:null at python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 + Signature Algorithm: sha1WithRSAEncryption + ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: + a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: + 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: + 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: + 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: + de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: + 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: + 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: + d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: + 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: + 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: + 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: + 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: + 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: + c1:ca:a9:94 +-----BEGIN CERTIFICATE----- +MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx +DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ +eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg +RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y +ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw +NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI +DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv +ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt +ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq +hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j +pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P +vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv +KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA +oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL +08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV +HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E +BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu +Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 +bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA +AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 +i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j +HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk +kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx +VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW +RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= +-----END CERTIFICATE----- diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -56,6 +56,7 @@ WRONGCERT = data_file("XXXnonexisting.pem") BADKEY = data_file("badkey.pem") NOKIACERT = data_file("nokia.pem") +NULLBYTECERT = data_file("nullbytecert.pem") DHFILE = data_file("dh512.pem") BYTES_DHFILE = os.fsencode(DHFILE) @@ -176,6 +177,27 @@ ('DNS', 'projects.forum.nokia.com')) ) + def test_parse_cert_CVE_2013_4238(self): + p = ssl._ssl._test_decode_cert(NULLBYTECERT) + if support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + subject = ((('countryName', 'US'),), + (('stateOrProvinceName', 'Oregon'),), + (('localityName', 'Beaverton'),), + (('organizationName', 'Python 
Software Foundation'),), + (('organizationalUnitName', 'Python Core Development'),), + (('commonName', 'null.python.org\x00example.org'),), + (('emailAddress', 'python-dev at python.org'),)) + self.assertEqual(p['subject'], subject) + self.assertEqual(p['issuer'], subject) + self.assertEqual(p['subjectAltName'], + (('DNS', 'altnull.python.org\x00example.com'), + ('email', 'null at python.org\x00user at example.org'), + ('URI', 'http://null.python.org\x00http://example.org'), + ('IP Address', '192.0.2.1'), + ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) + ) + def test_DER_to_PEM(self): with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: pem = f.read() @@ -308,6 +330,13 @@ fail(cert, 'foo.a.com') fail(cert, 'bar.foo.com') + # NULL bytes are bad, CVE-2013-4073 + cert = {'subject': ((('commonName', + 'null.python.org\x00example.org'),),)} + ok(cert, 'null.python.org\x00example.org') # or raise an error? + fail(cert, 'example.org') + fail(cert, 'null.python.org') + # Slightly fake real-world example cert = {'notAfter': 'Jun 26 21:41:46 2011 GMT', 'subject': ((('commonName', 'linuxfrz.org'),),), diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,12 @@ Library ------- +- Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes + inside subjectAltName correctly. Formerly the module has used OpenSSL's + GENERAL_NAME_print() function to get the string represention of ASN.1 + strings for ``rfc822Name`` (email), ``dNSName`` (DNS) and + ``uniformResourceIdentifier`` (URI). + - Issue #18701: Remove support of old CPython versions (<3.0) from C code. - Issue #18756: Improve error reporting in os.urandom() when the failure diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -805,12 +805,14 @@ ext->value->length)); for(j = 0; j < sk_GENERAL_NAME_num(names); j++) { - /* get a rendering of each name in the set of names */ + int gntype; + ASN1_STRING *as = NULL; name = sk_GENERAL_NAME_value(names, j); - if (name->type == GEN_DIRNAME) { - + gntype = name-> type; + switch (gntype) { + case GEN_DIRNAME: /* we special-case DirName as a tuple of tuples of attributes */ @@ -832,11 +834,62 @@ goto fail; } PyTuple_SET_ITEM(t, 1, v); - - } else { - + break; + + case GEN_EMAIL: + case GEN_DNS: + case GEN_URI: + /* GENERAL_NAME_print() doesn't handle NULL bytes in ASN1_string + correctly, CVE-2013-4238 */ + t = PyTuple_New(2); + if (t == NULL) + goto fail; + switch (gntype) { + case GEN_EMAIL: + v = PyUnicode_FromString("email"); + as = name->d.rfc822Name; + break; + case GEN_DNS: + v = PyUnicode_FromString("DNS"); + as = name->d.dNSName; + break; + case GEN_URI: + v = PyUnicode_FromString("URI"); + as = name->d.uniformResourceIdentifier; + break; + } + if (v == NULL) { + Py_DECREF(t); + goto fail; + } + PyTuple_SET_ITEM(t, 0, v); + v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_data(as), + ASN1_STRING_length(as)); + if (v == NULL) { + Py_DECREF(t); + goto fail; + } + PyTuple_SET_ITEM(t, 1, v); + break; + + default: /* for everything else, we use the OpenSSL print form */ - + switch (gntype) { + /* check for new general name type */ + case GEN_OTHERNAME: + case GEN_X400: + case GEN_EDIPARTY: + case GEN_IPADD: + case GEN_RID: + break; + default: + if (PyErr_WarnFormat(PyExc_RuntimeWarning, 1, + "Unknown general name type %d", + gntype) == -1) { + goto fail; + } + break; + } (void) BIO_reset(biobuf); GENERAL_NAME_print(biobuf, name); len = BIO_gets(biobuf, buf, sizeof(buf)-1); @@ -863,6 +916,7 @@ goto fail; } PyTuple_SET_ITEM(t, 1, v); + break; } 
/* and add that rendering to the list */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 01:11:17 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 01:11:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzA5?= =?utf-8?q?=3A_Fix_CVE-2013-4238=2E_The_SSL_module_now_handles_NULL_bytes?= Message-ID: <3cH0dd5B5hz7Ljx@mail.python.org> http://hg.python.org/cpython/rev/bd2360476bdb changeset: 85214:bd2360476bdb branch: 2.7 parent: 85201:87fcc13ade91 user: Christian Heimes date: Sat Aug 17 00:54:47 2013 +0200 summary: Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes inside subjectAltName correctly. Formerly the module has used OpenSSL's GENERAL_NAME_print() function to get the string represention of ASN.1 strings for rfc822Name (email), dNSName (DNS) and uniformResourceIdentifier (URI). files: Lib/test/nullbytecert.pem | 90 +++++++++++++++++++++++++++ Lib/test/test_ssl.py | 29 ++++++++- Misc/NEWS | 6 + Modules/_ssl.c | 66 ++++++++++++++++++- 4 files changed, 183 insertions(+), 8 deletions(-) diff --git a/Lib/test/nullbytecert.pem b/Lib/test/nullbytecert.pem new file mode 100644 --- /dev/null +++ b/Lib/test/nullbytecert.pem @@ -0,0 +1,90 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 0 (0x0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev at python.org + Validity + Not Before: Aug 7 13:11:52 2013 GMT + Not After : Aug 7 13:12:52 2013 GMT + Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev at python.org + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: + 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: + 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: + 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: + 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: + 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: + a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: + 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: + ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: + 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: + 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: + 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: + f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: + f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: + ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: + d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: + 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: + 2f:85 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: critical + CA:FALSE + X509v3 Subject Key Identifier: + 88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C + X509v3 Key Usage: + Digital Signature, Non Repudiation, Key Encipherment + X509v3 Subject Alternative Name: + ************************************************************* + WARNING: The values for DNS, email and URI are WRONG. OpenSSL + doesn't print the text after a NULL byte. 
+ ************************************************************* + DNS:altnull.python.org, email:null at python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 + Signature Algorithm: sha1WithRSAEncryption + ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: + a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: + 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: + 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: + 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: + de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: + 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: + 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: + d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: + 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: + 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: + 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: + 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: + 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: + c1:ca:a9:94 +-----BEGIN CERTIFICATE----- +MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx +DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ +eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg +RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y +ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw +NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI +DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv +ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt +ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq +hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j +pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P +vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv +KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA +oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL +08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV +HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E +BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu +Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 +bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA +AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 +i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j +HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk +kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx +VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW +RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= +-----END CERTIFICATE----- diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -25,6 +25,7 @@ HOST = test_support.HOST CERTFILE = None SVN_PYTHON_ORG_ROOT_CERT = None +NULLBYTECERT = None def handle_error(prefix): exc_format = ' '.join(traceback.format_exception(*sys.exc_info())) @@ -123,6 +124,27 @@ ('DNS', 'projects.forum.nokia.com')) ) + def test_parse_cert_CVE_2013_4238(self): + p = ssl._ssl._test_decode_cert(NULLBYTECERT) + if test_support.verbose: + sys.stdout.write("\n" + pprint.pformat(p) + "\n") + subject = ((('countryName', 'US'),), + (('stateOrProvinceName', 'Oregon'),), + (('localityName', 'Beaverton'),), + (('organizationName', 'Python Software Foundation'),), + 
(('organizationalUnitName', 'Python Core Development'),), + (('commonName', 'null.python.org\x00example.org'),), + (('emailAddress', 'python-dev at python.org'),)) + self.assertEqual(p['subject'], subject) + self.assertEqual(p['issuer'], subject) + self.assertEqual(p['subjectAltName'], + (('DNS', 'altnull.python.org\x00example.com'), + ('email', 'null at python.org\x00user at example.org'), + ('URI', 'http://null.python.org\x00http://example.org'), + ('IP Address', '192.0.2.1'), + ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) + ) + def test_DER_to_PEM(self): with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: pem = f.read() @@ -1360,7 +1382,7 @@ def test_main(verbose=False): - global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT + global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT, NULLBYTECERT CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert.pem") SVN_PYTHON_ORG_ROOT_CERT = os.path.join( @@ -1368,10 +1390,13 @@ "https_svn_python_org_root.pem") NOKIACERT = os.path.join(os.path.dirname(__file__) or os.curdir, "nokia.pem") + NULLBYTECERT = os.path.join(os.path.dirname(__file__) or os.curdir, + "nullbytecert.pem") if (not os.path.exists(CERTFILE) or not os.path.exists(SVN_PYTHON_ORG_ROOT_CERT) or - not os.path.exists(NOKIACERT)): + not os.path.exists(NOKIACERT) or + not os.path.exists(NULLBYTECERT)): raise test_support.TestFailed("Can't read certificate files!") tests = [BasicTests, BasicSocketTests] diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,12 @@ Library ------- +- Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes + inside subjectAltName correctly. Formerly the module has used OpenSSL's + GENERAL_NAME_print() function to get the string represention of ASN.1 + strings for ``rfc822Name`` (email), ``dNSName`` (DNS) and + ``uniformResourceIdentifier`` (URI). + - Issue #18756: Improve error reporting in os.urandom() when the failure is due to something else than /dev/urandom not existing (for example, exhausting the file descriptor limit). 
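For illustration only: once this fix is in place, the NUL byte survives into the decoded certificate instead of being silently truncated by GENERAL_NAME_print(), so application code can detect and reject such certificates itself. A minimal defensive check over the dict returned by SSLSocket.getpeercert() (or ssl._ssl._test_decode_cert(), whose tuple layout is shown in the test above) might look like the following sketch; the helper name is invented and is not part of the patch:

    def has_embedded_nul(cert):
        """Return True if any name in a decoded certificate contains a NUL byte.

        `cert` is the dict returned by SSLSocket.getpeercert(); this is a
        purely illustrative check, not part of the committed fix.
        """
        # 'subject' and 'issuer' are tuples of RDNs, each a tuple of (key, value) pairs.
        for rdn in cert.get('subject', ()) + cert.get('issuer', ()):
            for _key, value in rdn:
                if '\x00' in value:
                    return True
        # 'subjectAltName' is a flat tuple of (kind, value) pairs.
        for _kind, value in cert.get('subjectAltName', ()):
            if '\x00' in value:
                return True
        return False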
diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -738,13 +738,16 @@ ext->value->length)); for(j = 0; j < sk_GENERAL_NAME_num(names); j++) { - /* get a rendering of each name in the set of names */ + int gntype; + ASN1_STRING *as = NULL; name = sk_GENERAL_NAME_value(names, j); - if (name->type == GEN_DIRNAME) { - - /* we special-case DirName as a tuple of tuples of attributes */ + gntype = name-> type; + switch (gntype) { + case GEN_DIRNAME: + /* we special-case DirName as a tuple of + tuples of attributes */ t = PyTuple_New(2); if (t == NULL) { @@ -764,11 +767,61 @@ goto fail; } PyTuple_SET_ITEM(t, 1, v); + break; - } else { + case GEN_EMAIL: + case GEN_DNS: + case GEN_URI: + /* GENERAL_NAME_print() doesn't handle NULL bytes in ASN1_string + correctly, CVE-2013-4238 */ + t = PyTuple_New(2); + if (t == NULL) + goto fail; + switch (gntype) { + case GEN_EMAIL: + v = PyString_FromString("email"); + as = name->d.rfc822Name; + break; + case GEN_DNS: + v = PyString_FromString("DNS"); + as = name->d.dNSName; + break; + case GEN_URI: + v = PyString_FromString("URI"); + as = name->d.uniformResourceIdentifier; + break; + } + if (v == NULL) { + Py_DECREF(t); + goto fail; + } + PyTuple_SET_ITEM(t, 0, v); + v = PyString_FromStringAndSize((char *)ASN1_STRING_data(as), + ASN1_STRING_length(as)); + if (v == NULL) { + Py_DECREF(t); + goto fail; + } + PyTuple_SET_ITEM(t, 1, v); + break; + default: /* for everything else, we use the OpenSSL print form */ - + switch (gntype) { + /* check for new general name type */ + case GEN_OTHERNAME: + case GEN_X400: + case GEN_EDIPARTY: + case GEN_IPADD: + case GEN_RID: + break; + default: + if (PyErr_Warn(PyExc_RuntimeWarning, + "Unknown general name type") == -1) { + goto fail; + } + break; + } (void) BIO_reset(biobuf); GENERAL_NAME_print(biobuf, name); len = BIO_gets(biobuf, buf, sizeof(buf)-1); @@ -794,6 +847,7 @@ goto fail; } PyTuple_SET_ITEM(t, 1, v); + break; } /* and add that rendering to the list */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 01:40:57 2013 From: python-checkins at python.org (vinay.sajip) Date: Sat, 17 Aug 2013 01:40:57 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzU5?= =?utf-8?q?=3A_Improved_cross-references_in_logging_documentation=2E?= Message-ID: <3cH1Hs6GBFzRTR@mail.python.org> http://hg.python.org/cpython/rev/aea67c35e3ce changeset: 85215:aea67c35e3ce branch: 2.7 user: Vinay Sajip date: Sat Aug 17 00:38:48 2013 +0100 summary: Issue #18759: Improved cross-references in logging documentation. files: Doc/howto/logging-cookbook.rst | 55 ++++++++-------- Doc/howto/logging.rst | 64 ++++++++++--------- Doc/library/logging.config.rst | 14 ++- Doc/library/logging.handlers.rst | 14 ++- 4 files changed, 78 insertions(+), 69 deletions(-) diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -97,11 +97,11 @@ Multiple handlers and formatters -------------------------------- -Loggers are plain Python objects. The :func:`addHandler` method has no minimum -or maximum quota for the number of handlers you may add. Sometimes it will be -beneficial for an application to log all messages of all severities to a text -file while simultaneously logging errors or above to the console. To set this -up, simply configure the appropriate handlers. The logging calls in the +Loggers are plain Python objects. 
The :meth:`~Logger.addHandler` method has no +minimum or maximum quota for the number of handlers you may add. Sometimes it +will be beneficial for an application to log all messages of all severities to a +text file while simultaneously logging errors or above to the console. To set +this up, simply configure the appropriate handlers. The logging calls in the application code will remain unchanged. Here is a slight modification to the previous simple module-based configuration example:: @@ -395,8 +395,9 @@ Note that there are some security issues with pickle in some scenarios. If these affect you, you can use an alternative serialization scheme by overriding -the :meth:`makePickle` method and implementing your alternative there, as -well as adapting the above script to use your alternative serialization. +the :meth:`~handlers.SocketHandler.makePickle` method and implementing your +alternative there, as well as adapting the above script to use your alternative +serialization. .. _context-info: @@ -404,6 +405,8 @@ Adding contextual information to your logging output ---------------------------------------------------- +.. currentmodule:: logging + Sometimes you want logging output to contain contextual information in addition to the parameters passed to the logging call. For example, in a networked application, it may be desirable to log client-specific information @@ -445,9 +448,9 @@ msg, kwargs = self.process(msg, kwargs) self.logger.debug(msg, *args, **kwargs) -The :meth:`process` method of :class:`LoggerAdapter` is where the contextual -information is added to the logging output. It's passed the message and -keyword arguments of the logging call, and it passes back (potentially) +The :meth:`~LoggerAdapter.process` method of :class:`LoggerAdapter` is where the +contextual information is added to the logging output. It's passed the message +and keyword arguments of the logging call, and it passes back (potentially) modified versions of these to use in the call to the underlying logger. The default implementation of this method leaves the message alone, but inserts an 'extra' key in the keyword argument whose value is the dict-like object @@ -459,8 +462,8 @@ customized strings with your :class:`Formatter` instances which know about the keys of the dict-like object. If you need a different method, e.g. if you want to prepend or append the contextual information to the message string, -you just need to subclass :class:`LoggerAdapter` and override :meth:`process` -to do what you need. Here is a simple example:: +you just need to subclass :class:`LoggerAdapter` and override +:meth:`~LoggerAdapter.process` to do what you need. Here is a simple example:: class CustomAdapter(logging.LoggerAdapter): """ @@ -569,25 +572,23 @@ *multiple processes* is *not* supported, because there is no standard way to serialize access to a single file across multiple processes in Python. If you need to log to a single file from multiple processes, one way of doing this is -to have all the processes log to a :class:`SocketHandler`, and have a separate -process which implements a socket server which reads from the socket and logs -to file. (If you prefer, you can dedicate one thread in one of the existing -processes to perform this function.) :ref:`This section ` -documents this approach in more detail and includes a working socket receiver -which can be used as a starting point for you to adapt in your own -applications. 
+to have all the processes log to a :class:`~handlers.SocketHandler`, and have a +separate process which implements a socket server which reads from the socket +and logs to file. (If you prefer, you can dedicate one thread in one of the +existing processes to perform this function.) +:ref:`This section ` documents this approach in more detail and +includes a working socket receiver which can be used as a starting point for you +to adapt in your own applications. If you are using a recent version of Python which includes the :mod:`multiprocessing` module, you could write your own handler which uses the -:class:`Lock` class from this module to serialize access to the file from -your processes. The existing :class:`FileHandler` and subclasses do not make -use of :mod:`multiprocessing` at present, though they may do so in the future. -Note that at present, the :mod:`multiprocessing` module does not provide +:class:`~multiprocessing.Lock` class from this module to serialize access to the +file from your processes. The existing :class:`FileHandler` and subclasses do +not make use of :mod:`multiprocessing` at present, though they may do so in the +future. Note that at present, the :mod:`multiprocessing` module does not provide working lock functionality on all platforms (see http://bugs.python.org/issue3770). -.. currentmodule:: logging.handlers - Using file rotation ------------------- @@ -599,7 +600,7 @@ file and log to that. You may want to keep a certain number of these files, and when that many files have been created, rotate the files so that the number of files and the size of the files both remain bounded. For this usage pattern, the -logging package provides a :class:`RotatingFileHandler`:: +logging package provides a :class:`~handlers.RotatingFileHandler`:: import glob import logging @@ -650,7 +651,7 @@ Below is an example of a logging configuration dictionary - it's taken from the `documentation on the Django project `_. -This dictionary is passed to :func:`~logging.config.dictConfig` to put the configuration into effect:: +This dictionary is passed to :func:`~config.dictConfig` to put the configuration into effect:: LOGGING = { 'version': 1, diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst --- a/Doc/howto/logging.rst +++ b/Doc/howto/logging.rst @@ -469,12 +469,13 @@ :class:`~logging.Handler` objects are responsible for dispatching the appropriate log messages (based on the log messages' severity) to the handler's -specified destination. Logger objects can add zero or more handler objects to -themselves with an :func:`addHandler` method. As an example scenario, an -application may want to send all log messages to a log file, all log messages -of error or higher to stdout, and all messages of critical to an email address. -This scenario requires three individual handlers where each handler is -responsible for sending messages of a specific severity to a specific location. +specified destination. :class:`Logger` objects can add zero or more handler +objects to themselves with an :meth:`~Logger.addHandler` method. As an example +scenario, an application may want to send all log messages to a log file, all +log messages of error or higher to stdout, and all messages of critical to an +email address. This scenario requires three individual handlers where each +handler is responsible for sending messages of a specific severity to a specific +location. 
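The three-handler scenario described in the paragraph above maps directly onto addHandler() calls. A rough sketch follows; the log file name, mail host and addresses are placeholders and are not taken from the documentation:

    import logging
    import logging.handlers
    import sys

    logger = logging.getLogger('myapp')
    logger.setLevel(logging.DEBUG)

    all_messages = logging.FileHandler('myapp.log')       # everything to a file
    all_messages.setLevel(logging.DEBUG)

    errors_to_stdout = logging.StreamHandler(sys.stdout)  # ERROR and above to stdout
    errors_to_stdout.setLevel(logging.ERROR)

    critical_by_mail = logging.handlers.SMTPHandler(      # CRITICAL only by email
        mailhost='localhost',
        fromaddr='myapp@example.org',
        toaddrs=['ops@example.org'],
        subject='CRITICAL error in myapp')
    critical_by_mail.setLevel(logging.CRITICAL)

    for handler in (all_messages, errors_to_stdout, critical_by_mail):
        logger.addHandler(handler)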
The standard library includes quite a few handler types (see :ref:`useful-handlers`); the tutorials use mainly :class:`StreamHandler` and @@ -485,16 +486,17 @@ developers who are using the built-in handler objects (that is, not creating custom handlers) are the following configuration methods: -* The :meth:`Handler.setLevel` method, just as in logger objects, specifies the +* The :meth:`~Handler.setLevel` method, just as in logger objects, specifies the lowest severity that will be dispatched to the appropriate destination. Why are there two :func:`setLevel` methods? The level set in the logger determines which severity of messages it will pass to its handlers. The level set in each handler determines which messages that handler will send on. -* :func:`setFormatter` selects a Formatter object for this handler to use. +* :meth:`~Handler.setFormatter` selects a Formatter object for this handler to + use. -* :func:`addFilter` and :func:`removeFilter` respectively configure and - deconfigure filter objects on handlers. +* :meth:`~Handler.addFilter` and :meth:`~Handler.removeFilter` respectively + configure and deconfigure filter objects on handlers. Application code should not directly instantiate and use instances of :class:`Handler`. Instead, the :class:`Handler` class is a base class that @@ -918,16 +920,16 @@ use with the % operator and a dictionary. For formatting multiple messages in a batch, instances of -:class:`BufferingFormatter` can be used. In addition to the format string (which -is applied to each message in the batch), there is provision for header and -trailer format strings. +:class:`~handlers.BufferingFormatter` can be used. In addition to the format +string (which is applied to each message in the batch), there is provision for +header and trailer format strings. When filtering based on logger level and/or handler level is not enough, instances of :class:`Filter` can be added to both :class:`Logger` and -:class:`Handler` instances (through their :meth:`addFilter` method). Before -deciding to process a message further, both loggers and handlers consult all -their filters for permission. If any filter returns a false value, the message -is not processed further. +:class:`Handler` instances (through their :meth:`~Handler.addFilter` method). +Before deciding to process a message further, both loggers and handlers consult +all their filters for permission. If any filter returns a false value, the +message is not processed further. The basic :class:`Filter` functionality allows filtering by specific logger name. If this feature is used, messages sent to the named logger and its @@ -945,19 +947,20 @@ cause the application using logging to terminate prematurely. :class:`SystemExit` and :class:`KeyboardInterrupt` exceptions are never -swallowed. Other exceptions which occur during the :meth:`emit` method of a -:class:`Handler` subclass are passed to its :meth:`handleError` method. +swallowed. Other exceptions which occur during the :meth:`~Handler.emit` method +of a :class:`Handler` subclass are passed to its :meth:`~Handler.handleError` +method. -The default implementation of :meth:`handleError` in :class:`Handler` checks -to see if a module-level variable, :data:`raiseExceptions`, is set. If set, a -traceback is printed to :data:`sys.stderr`. If not set, the exception is swallowed. +The default implementation of :meth:`~Handler.handleError` in :class:`Handler` +checks to see if a module-level variable, :data:`raiseExceptions`, is set. 
If +set, a traceback is printed to :data:`sys.stderr`. If not set, the exception is +swallowed. .. note:: The default value of :data:`raiseExceptions` is ``True``. This is because during development, you typically want to be notified of any exceptions that occur. It's advised that you set :data:`raiseExceptions` to ``False`` for production usage. -.. currentmodule:: logging .. _arbitrary-object-messages: @@ -967,11 +970,11 @@ In the preceding sections and examples, it has been assumed that the message passed when logging the event is a string. However, this is not the only possibility. You can pass an arbitrary object as a message, and its -:meth:`__str__` method will be called when the logging system needs to convert -it to a string representation. In fact, if you want to, you can avoid +:meth:`~object.__str__` method will be called when the logging system needs to +convert it to a string representation. In fact, if you want to, you can avoid computing a string representation altogether - for example, the -:class:`SocketHandler` emits an event by pickling it and sending it over the -wire. +:class:`~handlers.SocketHandler` emits an event by pickling it and sending it +over the wire. Optimization @@ -980,9 +983,10 @@ Formatting of message arguments is deferred until it cannot be avoided. However, computing the arguments passed to the logging method can also be expensive, and you may want to avoid doing it if the logger will just throw -away your event. To decide what to do, you can call the :meth:`isEnabledFor` -method which takes a level argument and returns true if the event would be -created by the Logger for that level of call. You can write code like this:: +away your event. To decide what to do, you can call the +:meth:`~Logger.isEnabledFor` method which takes a level argument and returns +true if the event would be created by the Logger for that level of call. +You can write code like this:: if logger.isEnabledFor(logging.DEBUG): logger.debug('Message with %s, %s', expensive_func1(), diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -104,8 +104,9 @@ configurations. If no port is specified, the module's default :const:`DEFAULT_LOGGING_CONFIG_PORT` is used. Logging configurations will be sent as a file suitable for processing by :func:`fileConfig`. Returns a - :class:`Thread` instance on which you can call :meth:`start` to start the - server, and which you can :meth:`join` when appropriate. To stop the server, + :class:`~threading.Thread` instance on which you can call + :meth:`~threading.Thread.start` to start the server, and which you can + :meth:`~threading.Thread.join` when appropriate. To stop the server, call :func:`stopListening`. To send a configuration to the socket, read in the configuration file and @@ -169,11 +170,11 @@ * *formatters* - the corresponding value will be a dict in which each key is a formatter id and each value is a dict describing how to - configure the corresponding Formatter instance. + configure the corresponding :class:`~logging.Formatter` instance. The configuring dict is searched for keys ``format`` and ``datefmt`` (with defaults of ``None``) and these are used to construct a - :class:`logging.Formatter` instance. + :class:`~logging.Formatter` instance. * *filters* - the corresponding value will be a dict in which each key is a filter id and each value is a dict describing how to configure @@ -711,8 +712,9 @@ The ``class`` entry is optional. 
It indicates the name of the formatter's class (as a dotted module and class name.) This option is useful for instantiating a -:class:`Formatter` subclass. Subclasses of :class:`Formatter` can present -exception tracebacks in an expanded or condensed format. +:class:`~logging.Formatter` subclass. Subclasses of +:class:`~logging.Formatter` can present exception tracebacks in an expanded or +condensed format. .. note:: Due to the use of :func:`eval` as described above, there are potential security risks which result from using the :func:`listen` to send diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -53,8 +53,8 @@ .. method:: flush() Flushes the stream by calling its :meth:`flush` method. Note that the - :meth:`close` method is inherited from :class:`Handler` and so does - no output, so an explicit :meth:`flush` call may be needed at times. + :meth:`close` method is inherited from :class:`~logging.Handler` and so + does no output, so an explicit :meth:`flush` call may be needed at times. .. _file-handler: @@ -142,8 +142,8 @@ This handler is not appropriate for use under Windows, because under Windows open log files cannot be moved or renamed - logging opens the files with exclusive locks - and so there is no need for such a handler. Furthermore, -*ST_INO* is not supported under Windows; :func:`stat` always returns zero for -this value. +*ST_INO* is not supported under Windows; :func:`~os.stat` always returns zero +for this value. .. class:: WatchedFileHandler(filename[,mode[, encoding[, delay]]]) @@ -305,7 +305,8 @@ binary format. If there is an error with the socket, silently drops the packet. If the connection was previously lost, re-establishes the connection. To unpickle the record at the receiving end into a - :class:`LogRecord`, use the :func:`makeLogRecord` function. + :class:`~logging.LogRecord`, use the :func:`~logging.makeLogRecord` + function. .. method:: handleError() @@ -383,7 +384,8 @@ Pickles the record's attribute dictionary and writes it to the socket in binary format. If there is an error with the socket, silently drops the packet. To unpickle the record at the receiving end into a - :class:`LogRecord`, use the :func:`makeLogRecord` function. + :class:`~logging.LogRecord`, use the :func:`~logging.makeLogRecord` + function. .. method:: makeSocket() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 01:40:59 2013 From: python-checkins at python.org (vinay.sajip) Date: Sat, 17 Aug 2013 01:40:59 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzU5?= =?utf-8?q?=3A_Improved_cross-references_in_logging_documentation=2E?= Message-ID: <3cH1Hv3LWGz7Ljj@mail.python.org> http://hg.python.org/cpython/rev/80c475617608 changeset: 85216:80c475617608 branch: 3.3 parent: 85212:51ebb3780a17 user: Vinay Sajip date: Sat Aug 17 00:39:42 2013 +0100 summary: Issue #18759: Improved cross-references in logging documentation. 
files: Doc/howto/logging-cookbook.rst | 51 ++++++++------- Doc/howto/logging.rst | 63 ++++++++++--------- Doc/library/logging.config.rst | 14 ++- Doc/library/logging.handlers.rst | 14 ++- 4 files changed, 76 insertions(+), 66 deletions(-) diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -97,11 +97,11 @@ Multiple handlers and formatters -------------------------------- -Loggers are plain Python objects. The :func:`addHandler` method has no minimum -or maximum quota for the number of handlers you may add. Sometimes it will be -beneficial for an application to log all messages of all severities to a text -file while simultaneously logging errors or above to the console. To set this -up, simply configure the appropriate handlers. The logging calls in the +Loggers are plain Python objects. The :meth:`~Logger.addHandler` method has no +minimum or maximum quota for the number of handlers you may add. Sometimes it +will be beneficial for an application to log all messages of all severities to a +text file while simultaneously logging errors or above to the console. To set +this up, simply configure the appropriate handlers. The logging calls in the application code will remain unchanged. Here is a slight modification to the previous simple module-based configuration example:: @@ -459,8 +459,9 @@ Note that there are some security issues with pickle in some scenarios. If these affect you, you can use an alternative serialization scheme by overriding -the :meth:`makePickle` method and implementing your alternative there, as -well as adapting the above script to use your alternative serialization. +the :meth:`~handlers.SocketHandler.makePickle` method and implementing your +alternative there, as well as adapting the above script to use your alternative +serialization. .. _context-info: @@ -509,9 +510,9 @@ msg, kwargs = self.process(msg, kwargs) self.logger.debug(msg, *args, **kwargs) -The :meth:`process` method of :class:`LoggerAdapter` is where the contextual -information is added to the logging output. It's passed the message and -keyword arguments of the logging call, and it passes back (potentially) +The :meth:`~LoggerAdapter.process` method of :class:`LoggerAdapter` is where the +contextual information is added to the logging output. It's passed the message +and keyword arguments of the logging call, and it passes back (potentially) modified versions of these to use in the call to the underlying logger. The default implementation of this method leaves the message alone, but inserts an 'extra' key in the keyword argument whose value is the dict-like object @@ -523,8 +524,8 @@ customized strings with your :class:`Formatter` instances which know about the keys of the dict-like object. If you need a different method, e.g. if you want to prepend or append the contextual information to the message string, -you just need to subclass :class:`LoggerAdapter` and override :meth:`process` -to do what you need. Here is a simple example:: +you just need to subclass :class:`LoggerAdapter` and override +:meth:`~LoggerAdapter.process` to do what you need. Here is a simple example:: class CustomAdapter(logging.LoggerAdapter): """ @@ -633,20 +634,20 @@ *multiple processes* is *not* supported, because there is no standard way to serialize access to a single file across multiple processes in Python. 
If you need to log to a single file from multiple processes, one way of doing this is -to have all the processes log to a :class:`SocketHandler`, and have a separate -process which implements a socket server which reads from the socket and logs -to file. (If you prefer, you can dedicate one thread in one of the existing -processes to perform this function.) :ref:`This section ` -documents this approach in more detail and includes a working socket receiver -which can be used as a starting point for you to adapt in your own -applications. +to have all the processes log to a :class:`~handlers.SocketHandler`, and have a +separate process which implements a socket server which reads from the socket +and logs to file. (If you prefer, you can dedicate one thread in one of the +existing processes to perform this function.) +:ref:`This section ` documents this approach in more detail and +includes a working socket receiver which can be used as a starting point for you +to adapt in your own applications. If you are using a recent version of Python which includes the :mod:`multiprocessing` module, you could write your own handler which uses the -:class:`Lock` class from this module to serialize access to the file from -your processes. The existing :class:`FileHandler` and subclasses do not make -use of :mod:`multiprocessing` at present, though they may do so in the future. -Note that at present, the :mod:`multiprocessing` module does not provide +:class:`~multiprocessing.Lock` class from this module to serialize access to the +file from your processes. The existing :class:`FileHandler` and subclasses do +not make use of :mod:`multiprocessing` at present, though they may do so in the +future. Note that at present, the :mod:`multiprocessing` module does not provide working lock functionality on all platforms (see http://bugs.python.org/issue3770). @@ -880,7 +881,7 @@ file and log to that. You may want to keep a certain number of these files, and when that many files have been created, rotate the files so that the number of files and the size of the files both remain bounded. For this usage pattern, the -logging package provides a :class:`RotatingFileHandler`:: +logging package provides a :class:`~handlers.RotatingFileHandler`:: import glob import logging @@ -1254,7 +1255,7 @@ Below is an example of a logging configuration dictionary - it's taken from the `documentation on the Django project `_. -This dictionary is passed to :func:`~logging.config.dictConfig` to put the configuration into effect:: +This dictionary is passed to :func:`~config.dictConfig` to put the configuration into effect:: LOGGING = { 'version': 1, diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst --- a/Doc/howto/logging.rst +++ b/Doc/howto/logging.rst @@ -469,12 +469,13 @@ :class:`~logging.Handler` objects are responsible for dispatching the appropriate log messages (based on the log messages' severity) to the handler's -specified destination. Logger objects can add zero or more handler objects to -themselves with an :func:`addHandler` method. As an example scenario, an -application may want to send all log messages to a log file, all log messages -of error or higher to stdout, and all messages of critical to an email address. -This scenario requires three individual handlers where each handler is -responsible for sending messages of a specific severity to a specific location. +specified destination. :class:`Logger` objects can add zero or more handler +objects to themselves with an :meth:`~Logger.addHandler` method. 
As an example +scenario, an application may want to send all log messages to a log file, all +log messages of error or higher to stdout, and all messages of critical to an +email address. This scenario requires three individual handlers where each +handler is responsible for sending messages of a specific severity to a specific +location. The standard library includes quite a few handler types (see :ref:`useful-handlers`); the tutorials use mainly :class:`StreamHandler` and @@ -485,16 +486,17 @@ developers who are using the built-in handler objects (that is, not creating custom handlers) are the following configuration methods: -* The :meth:`Handler.setLevel` method, just as in logger objects, specifies the +* The :meth:`~Handler.setLevel` method, just as in logger objects, specifies the lowest severity that will be dispatched to the appropriate destination. Why are there two :func:`setLevel` methods? The level set in the logger determines which severity of messages it will pass to its handlers. The level set in each handler determines which messages that handler will send on. -* :func:`setFormatter` selects a Formatter object for this handler to use. +* :meth:`~Handler.setFormatter` selects a Formatter object for this handler to + use. -* :func:`addFilter` and :func:`removeFilter` respectively configure and - deconfigure filter objects on handlers. +* :meth:`~Handler.addFilter` and :meth:`~Handler.removeFilter` respectively + configure and deconfigure filter objects on handlers. Application code should not directly instantiate and use instances of :class:`Handler`. Instead, the :class:`Handler` class is a base class that @@ -948,16 +950,16 @@ use with the % operator and a dictionary. For formatting multiple messages in a batch, instances of -:class:`BufferingFormatter` can be used. In addition to the format string (which -is applied to each message in the batch), there is provision for header and -trailer format strings. +:class:`~handlers.BufferingFormatter` can be used. In addition to the format +string (which is applied to each message in the batch), there is provision for +header and trailer format strings. When filtering based on logger level and/or handler level is not enough, instances of :class:`Filter` can be added to both :class:`Logger` and -:class:`Handler` instances (through their :meth:`addFilter` method). Before -deciding to process a message further, both loggers and handlers consult all -their filters for permission. If any filter returns a false value, the message -is not processed further. +:class:`Handler` instances (through their :meth:`~Handler.addFilter` method). +Before deciding to process a message further, both loggers and handlers consult +all their filters for permission. If any filter returns a false value, the +message is not processed further. The basic :class:`Filter` functionality allows filtering by specific logger name. If this feature is used, messages sent to the named logger and its @@ -975,12 +977,14 @@ cause the application using logging to terminate prematurely. :class:`SystemExit` and :class:`KeyboardInterrupt` exceptions are never -swallowed. Other exceptions which occur during the :meth:`emit` method of a -:class:`Handler` subclass are passed to its :meth:`handleError` method. +swallowed. Other exceptions which occur during the :meth:`~Handler.emit` method +of a :class:`Handler` subclass are passed to its :meth:`~Handler.handleError` +method. 
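As a small aside on the filters mentioned just above in this hunk, the addFilter() protocol boils down to a filter() method that returns a false value to stop a record. A brief sketch, where the 'noisylib' logger name is invented for the example:

    import logging

    class DropNoisyDebug(logging.Filter):
        """Reject DEBUG records from the hypothetical 'noisylib' package."""
        def filter(self, record):
            # A false return value stops the record at this logger or handler.
            return not (record.name.startswith('noisylib')
                        and record.levelno == logging.DEBUG)

    handler = logging.StreamHandler()
    handler.addFilter(DropNoisyDebug())
    logging.getLogger().addHandler(handler)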
-The default implementation of :meth:`handleError` in :class:`Handler` checks -to see if a module-level variable, :data:`raiseExceptions`, is set. If set, a -traceback is printed to :data:`sys.stderr`. If not set, the exception is swallowed. +The default implementation of :meth:`~Handler.handleError` in :class:`Handler` +checks to see if a module-level variable, :data:`raiseExceptions`, is set. If +set, a traceback is printed to :data:`sys.stderr`. If not set, the exception is +swallowed. .. note:: The default value of :data:`raiseExceptions` is ``True``. This is because during development, you typically want to be notified of any @@ -997,11 +1001,11 @@ In the preceding sections and examples, it has been assumed that the message passed when logging the event is a string. However, this is not the only possibility. You can pass an arbitrary object as a message, and its -:meth:`__str__` method will be called when the logging system needs to convert -it to a string representation. In fact, if you want to, you can avoid +:meth:`~object.__str__` method will be called when the logging system needs to +convert it to a string representation. In fact, if you want to, you can avoid computing a string representation altogether - for example, the -:class:`SocketHandler` emits an event by pickling it and sending it over the -wire. +:class:`~handlers.SocketHandler` emits an event by pickling it and sending it +over the wire. Optimization @@ -1010,9 +1014,10 @@ Formatting of message arguments is deferred until it cannot be avoided. However, computing the arguments passed to the logging method can also be expensive, and you may want to avoid doing it if the logger will just throw -away your event. To decide what to do, you can call the :meth:`isEnabledFor` -method which takes a level argument and returns true if the event would be -created by the Logger for that level of call. You can write code like this:: +away your event. To decide what to do, you can call the +:meth:`~Logger.isEnabledFor` method which takes a level argument and returns +true if the event would be created by the Logger for that level of call. +You can write code like this:: if logger.isEnabledFor(logging.DEBUG): logger.debug('Message with %s, %s', expensive_func1(), diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -101,8 +101,9 @@ configurations. If no port is specified, the module's default :const:`DEFAULT_LOGGING_CONFIG_PORT` is used. Logging configurations will be sent as a file suitable for processing by :func:`fileConfig`. Returns a - :class:`Thread` instance on which you can call :meth:`start` to start the - server, and which you can :meth:`join` when appropriate. To stop the server, + :class:`~threading.Thread` instance on which you can call + :meth:`~threading.Thread.start` to start the server, and which you can + :meth:`~threading.Thread.join` when appropriate. To stop the server, call :func:`stopListening`. To send a configuration to the socket, read in the configuration file and @@ -166,11 +167,11 @@ * *formatters* - the corresponding value will be a dict in which each key is a formatter id and each value is a dict describing how to - configure the corresponding Formatter instance. + configure the corresponding :class:`~logging.Formatter` instance. The configuring dict is searched for keys ``format`` and ``datefmt`` (with defaults of ``None``) and these are used to construct a - :class:`logging.Formatter` instance. 
+ :class:`~logging.Formatter` instance. * *filters* - the corresponding value will be a dict in which each key is a filter id and each value is a dict describing how to configure @@ -704,8 +705,9 @@ The ``class`` entry is optional. It indicates the name of the formatter's class (as a dotted module and class name.) This option is useful for instantiating a -:class:`Formatter` subclass. Subclasses of :class:`Formatter` can present -exception tracebacks in an expanded or condensed format. +:class:`~logging.Formatter` subclass. Subclasses of +:class:`~logging.Formatter` can present exception tracebacks in an expanded or +condensed format. .. note:: Due to the use of :func:`eval` as described above, there are potential security risks which result from using the :func:`listen` to send diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -53,8 +53,8 @@ .. method:: flush() Flushes the stream by calling its :meth:`flush` method. Note that the - :meth:`close` method is inherited from :class:`Handler` and so does - no output, so an explicit :meth:`flush` call may be needed at times. + :meth:`close` method is inherited from :class:`~logging.Handler` and so + does no output, so an explicit :meth:`flush` call may be needed at times. .. versionchanged:: 3.2 The ``StreamHandler`` class now has a ``terminator`` attribute, default @@ -145,8 +145,8 @@ This handler is not appropriate for use under Windows, because under Windows open log files cannot be moved or renamed - logging opens the files with exclusive locks - and so there is no need for such a handler. Furthermore, -*ST_INO* is not supported under Windows; :func:`stat` always returns zero for -this value. +*ST_INO* is not supported under Windows; :func:`~os.stat` always returns zero +for this value. .. class:: WatchedFileHandler(filename[,mode[, encoding[, delay]]]) @@ -383,7 +383,8 @@ binary format. If there is an error with the socket, silently drops the packet. If the connection was previously lost, re-establishes the connection. To unpickle the record at the receiving end into a - :class:`LogRecord`, use the :func:`makeLogRecord` function. + :class:`~logging.LogRecord`, use the :func:`~logging.makeLogRecord` + function. .. method:: handleError() @@ -461,7 +462,8 @@ Pickles the record's attribute dictionary and writes it to the socket in binary format. If there is an error with the socket, silently drops the packet. To unpickle the record at the receiving end into a - :class:`LogRecord`, use the :func:`makeLogRecord` function. + :class:`~logging.LogRecord`, use the :func:`~logging.makeLogRecord` + function. .. method:: makeSocket() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 01:41:01 2013 From: python-checkins at python.org (vinay.sajip) Date: Sat, 17 Aug 2013 01:41:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318759=3A_Merged_updates_from_3=2E3=2E?= Message-ID: <3cH1Hx0NHrz7Ljt@mail.python.org> http://hg.python.org/cpython/rev/9580f237139f changeset: 85217:9580f237139f parent: 85213:7a0f398d1a5c parent: 85216:80c475617608 user: Vinay Sajip date: Sat Aug 17 00:40:38 2013 +0100 summary: Issue #18759: Merged updates from 3.3. 
files: Doc/howto/logging-cookbook.rst | 51 ++++++++------- Doc/howto/logging.rst | 63 ++++++++++--------- Doc/library/logging.config.rst | 14 ++- Doc/library/logging.handlers.rst | 14 ++- 4 files changed, 76 insertions(+), 66 deletions(-) diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -97,11 +97,11 @@ Multiple handlers and formatters -------------------------------- -Loggers are plain Python objects. The :func:`addHandler` method has no minimum -or maximum quota for the number of handlers you may add. Sometimes it will be -beneficial for an application to log all messages of all severities to a text -file while simultaneously logging errors or above to the console. To set this -up, simply configure the appropriate handlers. The logging calls in the +Loggers are plain Python objects. The :meth:`~Logger.addHandler` method has no +minimum or maximum quota for the number of handlers you may add. Sometimes it +will be beneficial for an application to log all messages of all severities to a +text file while simultaneously logging errors or above to the console. To set +this up, simply configure the appropriate handlers. The logging calls in the application code will remain unchanged. Here is a slight modification to the previous simple module-based configuration example:: @@ -459,8 +459,9 @@ Note that there are some security issues with pickle in some scenarios. If these affect you, you can use an alternative serialization scheme by overriding -the :meth:`makePickle` method and implementing your alternative there, as -well as adapting the above script to use your alternative serialization. +the :meth:`~handlers.SocketHandler.makePickle` method and implementing your +alternative there, as well as adapting the above script to use your alternative +serialization. .. _context-info: @@ -509,9 +510,9 @@ msg, kwargs = self.process(msg, kwargs) self.logger.debug(msg, *args, **kwargs) -The :meth:`process` method of :class:`LoggerAdapter` is where the contextual -information is added to the logging output. It's passed the message and -keyword arguments of the logging call, and it passes back (potentially) +The :meth:`~LoggerAdapter.process` method of :class:`LoggerAdapter` is where the +contextual information is added to the logging output. It's passed the message +and keyword arguments of the logging call, and it passes back (potentially) modified versions of these to use in the call to the underlying logger. The default implementation of this method leaves the message alone, but inserts an 'extra' key in the keyword argument whose value is the dict-like object @@ -523,8 +524,8 @@ customized strings with your :class:`Formatter` instances which know about the keys of the dict-like object. If you need a different method, e.g. if you want to prepend or append the contextual information to the message string, -you just need to subclass :class:`LoggerAdapter` and override :meth:`process` -to do what you need. Here is a simple example:: +you just need to subclass :class:`LoggerAdapter` and override +:meth:`~LoggerAdapter.process` to do what you need. Here is a simple example:: class CustomAdapter(logging.LoggerAdapter): """ @@ -633,20 +634,20 @@ *multiple processes* is *not* supported, because there is no standard way to serialize access to a single file across multiple processes in Python. 
If you need to log to a single file from multiple processes, one way of doing this is -to have all the processes log to a :class:`SocketHandler`, and have a separate -process which implements a socket server which reads from the socket and logs -to file. (If you prefer, you can dedicate one thread in one of the existing -processes to perform this function.) :ref:`This section ` -documents this approach in more detail and includes a working socket receiver -which can be used as a starting point for you to adapt in your own -applications. +to have all the processes log to a :class:`~handlers.SocketHandler`, and have a +separate process which implements a socket server which reads from the socket +and logs to file. (If you prefer, you can dedicate one thread in one of the +existing processes to perform this function.) +:ref:`This section ` documents this approach in more detail and +includes a working socket receiver which can be used as a starting point for you +to adapt in your own applications. If you are using a recent version of Python which includes the :mod:`multiprocessing` module, you could write your own handler which uses the -:class:`Lock` class from this module to serialize access to the file from -your processes. The existing :class:`FileHandler` and subclasses do not make -use of :mod:`multiprocessing` at present, though they may do so in the future. -Note that at present, the :mod:`multiprocessing` module does not provide +:class:`~multiprocessing.Lock` class from this module to serialize access to the +file from your processes. The existing :class:`FileHandler` and subclasses do +not make use of :mod:`multiprocessing` at present, though they may do so in the +future. Note that at present, the :mod:`multiprocessing` module does not provide working lock functionality on all platforms (see http://bugs.python.org/issue3770). @@ -878,7 +879,7 @@ file and log to that. You may want to keep a certain number of these files, and when that many files have been created, rotate the files so that the number of files and the size of the files both remain bounded. For this usage pattern, the -logging package provides a :class:`RotatingFileHandler`:: +logging package provides a :class:`~handlers.RotatingFileHandler`:: import glob import logging @@ -1252,7 +1253,7 @@ Below is an example of a logging configuration dictionary - it's taken from the `documentation on the Django project `_. -This dictionary is passed to :func:`~logging.config.dictConfig` to put the configuration into effect:: +This dictionary is passed to :func:`~config.dictConfig` to put the configuration into effect:: LOGGING = { 'version': 1, diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst --- a/Doc/howto/logging.rst +++ b/Doc/howto/logging.rst @@ -469,12 +469,13 @@ :class:`~logging.Handler` objects are responsible for dispatching the appropriate log messages (based on the log messages' severity) to the handler's -specified destination. Logger objects can add zero or more handler objects to -themselves with an :func:`addHandler` method. As an example scenario, an -application may want to send all log messages to a log file, all log messages -of error or higher to stdout, and all messages of critical to an email address. -This scenario requires three individual handlers where each handler is -responsible for sending messages of a specific severity to a specific location. +specified destination. :class:`Logger` objects can add zero or more handler +objects to themselves with an :meth:`~Logger.addHandler` method. 
As an example +scenario, an application may want to send all log messages to a log file, all +log messages of error or higher to stdout, and all messages of critical to an +email address. This scenario requires three individual handlers where each +handler is responsible for sending messages of a specific severity to a specific +location. The standard library includes quite a few handler types (see :ref:`useful-handlers`); the tutorials use mainly :class:`StreamHandler` and @@ -485,16 +486,17 @@ developers who are using the built-in handler objects (that is, not creating custom handlers) are the following configuration methods: -* The :meth:`Handler.setLevel` method, just as in logger objects, specifies the +* The :meth:`~Handler.setLevel` method, just as in logger objects, specifies the lowest severity that will be dispatched to the appropriate destination. Why are there two :func:`setLevel` methods? The level set in the logger determines which severity of messages it will pass to its handlers. The level set in each handler determines which messages that handler will send on. -* :func:`setFormatter` selects a Formatter object for this handler to use. +* :meth:`~Handler.setFormatter` selects a Formatter object for this handler to + use. -* :func:`addFilter` and :func:`removeFilter` respectively configure and - deconfigure filter objects on handlers. +* :meth:`~Handler.addFilter` and :meth:`~Handler.removeFilter` respectively + configure and deconfigure filter objects on handlers. Application code should not directly instantiate and use instances of :class:`Handler`. Instead, the :class:`Handler` class is a base class that @@ -948,16 +950,16 @@ use with the % operator and a dictionary. For formatting multiple messages in a batch, instances of -:class:`BufferingFormatter` can be used. In addition to the format string (which -is applied to each message in the batch), there is provision for header and -trailer format strings. +:class:`~handlers.BufferingFormatter` can be used. In addition to the format +string (which is applied to each message in the batch), there is provision for +header and trailer format strings. When filtering based on logger level and/or handler level is not enough, instances of :class:`Filter` can be added to both :class:`Logger` and -:class:`Handler` instances (through their :meth:`addFilter` method). Before -deciding to process a message further, both loggers and handlers consult all -their filters for permission. If any filter returns a false value, the message -is not processed further. +:class:`Handler` instances (through their :meth:`~Handler.addFilter` method). +Before deciding to process a message further, both loggers and handlers consult +all their filters for permission. If any filter returns a false value, the +message is not processed further. The basic :class:`Filter` functionality allows filtering by specific logger name. If this feature is used, messages sent to the named logger and its @@ -975,12 +977,14 @@ cause the application using logging to terminate prematurely. :class:`SystemExit` and :class:`KeyboardInterrupt` exceptions are never -swallowed. Other exceptions which occur during the :meth:`emit` method of a -:class:`Handler` subclass are passed to its :meth:`handleError` method. +swallowed. Other exceptions which occur during the :meth:`~Handler.emit` method +of a :class:`Handler` subclass are passed to its :meth:`~Handler.handleError` +method. 
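To make the emit()/handleError() contract described above concrete: a custom handler typically wraps its output in a try block and delegates failures to handleError(), which in turn consults the raiseExceptions setting. A sketch, with a deliberately unreliable placeholder path standing in for a real destination:

    import logging

    class BestEffortFileHandler(logging.Handler):
        """Illustrative handler whose emit() failures are routed to handleError()."""
        def emit(self, record):
            try:
                line = self.format(record)
                # Placeholder destination that may well fail to open.
                with open('/var/log/maybe-missing/app.log', 'a') as f:
                    f.write(line + '\n')
            except Exception:
                # handleError() prints a traceback only if raiseExceptions is set;
                # SystemExit and KeyboardInterrupt are deliberately not caught.
                self.handleError(record)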
-The default implementation of :meth:`handleError` in :class:`Handler` checks -to see if a module-level variable, :data:`raiseExceptions`, is set. If set, a -traceback is printed to :data:`sys.stderr`. If not set, the exception is swallowed. +The default implementation of :meth:`~Handler.handleError` in :class:`Handler` +checks to see if a module-level variable, :data:`raiseExceptions`, is set. If +set, a traceback is printed to :data:`sys.stderr`. If not set, the exception is +swallowed. .. note:: The default value of :data:`raiseExceptions` is ``True``. This is because during development, you typically want to be notified of any @@ -997,11 +1001,11 @@ In the preceding sections and examples, it has been assumed that the message passed when logging the event is a string. However, this is not the only possibility. You can pass an arbitrary object as a message, and its -:meth:`__str__` method will be called when the logging system needs to convert -it to a string representation. In fact, if you want to, you can avoid +:meth:`~object.__str__` method will be called when the logging system needs to +convert it to a string representation. In fact, if you want to, you can avoid computing a string representation altogether - for example, the -:class:`SocketHandler` emits an event by pickling it and sending it over the -wire. +:class:`~handlers.SocketHandler` emits an event by pickling it and sending it +over the wire. Optimization @@ -1010,9 +1014,10 @@ Formatting of message arguments is deferred until it cannot be avoided. However, computing the arguments passed to the logging method can also be expensive, and you may want to avoid doing it if the logger will just throw -away your event. To decide what to do, you can call the :meth:`isEnabledFor` -method which takes a level argument and returns true if the event would be -created by the Logger for that level of call. You can write code like this:: +away your event. To decide what to do, you can call the +:meth:`~Logger.isEnabledFor` method which takes a level argument and returns +true if the event would be created by the Logger for that level of call. +You can write code like this:: if logger.isEnabledFor(logging.DEBUG): logger.debug('Message with %s, %s', expensive_func1(), diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -122,8 +122,9 @@ configurations. If no port is specified, the module's default :const:`DEFAULT_LOGGING_CONFIG_PORT` is used. Logging configurations will be sent as a file suitable for processing by :func:`fileConfig`. Returns a - :class:`Thread` instance on which you can call :meth:`start` to start the - server, and which you can :meth:`join` when appropriate. To stop the server, + :class:`~threading.Thread` instance on which you can call + :meth:`~threading.Thread.start` to start the server, and which you can + :meth:`~threading.Thread.join` when appropriate. To stop the server, call :func:`stopListening`. The ``verify`` argument, if specified, should be a callable which should @@ -203,11 +204,11 @@ * *formatters* - the corresponding value will be a dict in which each key is a formatter id and each value is a dict describing how to - configure the corresponding Formatter instance. + configure the corresponding :class:`~logging.Formatter` instance. The configuring dict is searched for keys ``format`` and ``datefmt`` (with defaults of ``None``) and these are used to construct a - :class:`logging.Formatter` instance. 
+ :class:`~logging.Formatter` instance. * *filters* - the corresponding value will be a dict in which each key is a filter id and each value is a dict describing how to configure @@ -741,8 +742,9 @@ The ``class`` entry is optional. It indicates the name of the formatter's class (as a dotted module and class name.) This option is useful for instantiating a -:class:`Formatter` subclass. Subclasses of :class:`Formatter` can present -exception tracebacks in an expanded or condensed format. +:class:`~logging.Formatter` subclass. Subclasses of +:class:`~logging.Formatter` can present exception tracebacks in an expanded or +condensed format. .. note:: Due to the use of :func:`eval` as described above, there are potential security risks which result from using the :func:`listen` to send diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -53,8 +53,8 @@ .. method:: flush() Flushes the stream by calling its :meth:`flush` method. Note that the - :meth:`close` method is inherited from :class:`Handler` and so does - no output, so an explicit :meth:`flush` call may be needed at times. + :meth:`close` method is inherited from :class:`~logging.Handler` and so + does no output, so an explicit :meth:`flush` call may be needed at times. .. versionchanged:: 3.2 The ``StreamHandler`` class now has a ``terminator`` attribute, default @@ -145,8 +145,8 @@ This handler is not appropriate for use under Windows, because under Windows open log files cannot be moved or renamed - logging opens the files with exclusive locks - and so there is no need for such a handler. Furthermore, -*ST_INO* is not supported under Windows; :func:`stat` always returns zero for -this value. +*ST_INO* is not supported under Windows; :func:`~os.stat` always returns zero +for this value. .. class:: WatchedFileHandler(filename[,mode[, encoding[, delay]]]) @@ -389,7 +389,8 @@ binary format. If there is an error with the socket, silently drops the packet. If the connection was previously lost, re-establishes the connection. To unpickle the record at the receiving end into a - :class:`LogRecord`, use the :func:`makeLogRecord` function. + :class:`~logging.LogRecord`, use the :func:`~logging.makeLogRecord` + function. .. method:: handleError() @@ -467,7 +468,8 @@ Pickles the record's attribute dictionary and writes it to the socket in binary format. If there is an error with the socket, silently drops the packet. To unpickle the record at the receiving end into a - :class:`LogRecord`, use the :func:`makeLogRecord` function. + :class:`~logging.LogRecord`, use the :func:`~logging.makeLogRecord` + function. .. 
method:: makeSocket() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 04:27:24 2013 From: python-checkins at python.org (nick.coghlan) Date: Sat, 17 Aug 2013 04:27:24 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Latest_draft_of_PEP_449?= Message-ID: <3cH4zw2b7Sz7LjQ@mail.python.org> http://hg.python.org/peps/rev/7a5fc76dd7f7 changeset: 5060:7a5fc76dd7f7 user: Nick Coghlan date: Fri Aug 16 21:27:00 2013 -0500 summary: Latest draft of PEP 449 files: pep-0449.txt | 125 +++++++++++++++++++++++++++----------- 1 files changed, 87 insertions(+), 38 deletions(-) diff --git a/pep-0449.txt b/pep-0449.txt --- a/pep-0449.txt +++ b/pep-0449.txt @@ -1,5 +1,5 @@ PEP: 449 -Title: Removal of Official Public PyPI Mirrors +Title: Removal of the PyPI Mirror Auto Discovery and Naming Scheme Version: $Revision$ Last-Modified: $Date$ Author: Donald Stufft @@ -16,9 +16,9 @@ Abstract ======== -This PEP provides a path to deprecate and ultimately remove the official -public mirroring infrastructure for `PyPI`_. It does not propose the removal -of mirroring support in general. +This PEP provides a path to deprecate and ultimately remove the auto discovery +of PyPI mirrors as well as the hard coded naming scheme which requires +delegating a domain name under pypi.python.org to a third party. Rationale @@ -26,9 +26,10 @@ The PyPI mirroring infrastructure (defined in `PEP381`_) provides a means to mirror the content of PyPI used by the automatic installers. It also provides -a method for autodiscovery of mirrors and a consistent naming scheme. +a method for auto discovery of mirrors and a consistent naming scheme. -There are a number of problems with the official public mirrors: +There are a number of problems with the auto discovery protocol and the +naming scheme: * They give control over a \*.python.org domain name to a third party, allowing that third party to set or read cookies on the pypi.python.org and @@ -36,26 +37,31 @@ * The use of a sub domain of pypi.python.org means that the mirror operators will never be able to get a SSL certificate of their own, and giving them one for a python.org domain name is unlikely to happen. -* They are often out of date, most often by several hours to a few days, but - regularly several days and even months. -* With the introduction of the CDN on PyPI the public mirroring infrastructure - is not as important as it once was as the CDN is also a globally distributed - network of servers which will function even if PyPI is down. -* Although there is provisions in place for it, there is currently no known - installer which uses the authenticity checks discussed in `PEP381`_ which - means that any download from a mirror is subject to attack by a malicious - mirror operator, but further more due to the lack of TLS it also means that - any download from a mirror is also subject to a MITM attack. -* They have only ever been implemented by one installer (pip), and its - implementation, besides being insecure, has serious issues with performance - and is slated for removal with it's next release (1.5). +* The auto discovery uses an unauthenticated protocol (DNS). +* The lack of a TLS certificate on these domains means that clients can not + be sure that they have not been a victim of DNS poisoning or a MITM attack. +* The auto discovery protocol was designed to enable a client to automatically + select a mirror for use. 
This is no longer a requirement because the CDN + that PyPI is now using is a globally distributed network of servers which will + automatically select one close to the client without any effort on the + client's part. +* The auto discovery protocol and use of the consistent naming scheme have only + ever been implemented by one installer (pip), and its implementation, besides + being insecure, has serious issues with performance and is slated for removal + with its next release (1.5). +* While there are provisions in `PEP381`_ that would solve *some* of these + issues for a dedicated client, it would not solve the issues that affect a + user's browser. Additionally, these provisions have not been implemented by + any installer to date. -Due to the number of issues, some of them very serious, and the CDN which more -or less provides much of the same benefits this PEP proposes to first -deprecate and then remove the public mirroring infrastructure. The ability to -mirror and the method of mirroring will not be affected and the existing -public mirrors are encouraged to acquire their own domains to host their -mirrors on if they wish to continue hosting them. +Due to the number of issues, some of them very serious, and the CDN which +provides most of the benefit of the auto discovery and consistent naming scheme +this PEP proposes to first deprecate and then remove the [a..z].pypi.python.org +names for mirrors and the last.pypi.python.org name for the auto discovery +protocol. The ability to mirror and the method of mirroring will not be affected +and will continue to exist as written in `PEP381`_. Operators of existing +mirrors are encouraged to acquire their own domains and certificates to use for +their mirrors if they wish to continue hosting them. Plan for Deprecation & Removal @@ -66,24 +72,67 @@ direct users to external resources like http://www.pypi-mirrors.org/ to discover unofficial public mirrors if they wish to use one. -On October 1st, 2013, roughly 2 months from the date of this PEP, the DNS names -of the public mirrors ([a-g].pypi.python.org) will be changed to point back to -PyPI which will be modified to accept requests from those domains. At this -point in time the public mirrors will be considered deprecated. +Mirror operators, if they wish to continue operating their mirror, should +acquire a domain name to represent their mirror and, if they are able, a TLS +certificate. Once they have acquired a domain they should redirect their +assigned N.pypi.python.org domain name to their new domain. On Feb 15th, 2014 +the DNS entries for [a..z].pypi.python.org and last.pypi.python.org will be +removed. At any time prior to Feb 15th, 2014 a mirror operator may request +that their domain name be reclaimed by PyPI and pointed back at the master. -Then, roughly 2 months after the release of the first version of pip to have -mirroring support removed (currently slated for pip 1.5) the DNS entries for -[a-g].pypi.python.org and last.pypi.python.org will be removed and PyPI will -no longer accept requests at those domains. +Why Feb 15th, 2014 +------------------ -Unofficial Public or Private Mirrors -==================================== +The most critical decision of this PEP is the final cut-off date. If the date +is too soon then it needlessly punishes people by forcing them to drop +everything to update their deployment scripts. If the date is too far away then +the extended period of time does not help with the migration effort and merely +puts off the migration until a later date.
+ +The date of Feb 15th, 2014 has been chosen because it is roughly 6 months from +the date of the PEP. This should ensure a lengthy period of time to enable +people to update their deployment procedures to point to the new domain names +without merely padding the cut-off date. + + +Why the DNS entries must be removed +----------------------------------- + +While it would be possible to simply reclaim the domain names used in mirrors +and direct them back at PyPI in order to prevent users from needing to update +configurations to point away from those domains, this has a number of issues. + +* Anyone who currently has these names hard coded in their configuration has + them hard coded as HTTP. This means that by allowing these names to continue + resolving, we make it simple for a MITM operator to attack users by rewriting + the redirect to HTTPS prior to giving it to the client. +* The overhead of maintaining several domains pointing at PyPI has proved + troublesome for the small number of N.pypi.python.org domains that have + already been reclaimed. They often get mis-configured when things + change on the service, which leaves them broken for months at a time + until somebody notices. By leaving them in, we leave users of these domains + open to random breakages which are less likely to get caught or noticed. +* People using these domains have explicitly chosen to use them for one reason + or another. One such reason may be that they do not wish to deploy from + a host located in a particular country. If these domains continue to resolve + but do not point at their existing locations, we have silently removed this + choice from the existing users of those domains. + +That being said, removing the entries *will* require users who have modified +their configuration to either point back at the master (PyPI) or select a new +mirror name to point at. This is regarded as a regrettable requirement to +protect PyPI itself and the users of the mirrors from the attacks outlined +above or, at the very least, to require them to make an informed decision about +the insecurity. + + +Public or Private Mirrors +========================= The mirroring protocol will continue to exist as defined in `PEP381`_ and -people are encouraged to utilize to host unofficial public and private mirrors -if they so desire. For operators of unofficial public or private mirrors the -recommended mirroring client is `Bandersnatch`_. +people are encouraged to host public and private mirrors if they so desire. +The recommended mirroring client is `Bandersnatch`_. ..
_PyPI: https://pypi.python.org/ -- Repository URL: http://hg.python.org/peps From solipsis at pitrou.net Sat Aug 17 06:05:31 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 17 Aug 2013 06:05:31 +0200 Subject: [Python-checkins] Daily reference leaks (9580f237139f): sum=2 Message-ID: results for 9580f237139f on branch "default" -------------------------------------------- test_site leaked [0, -2, 2] references, sum=0 test_site leaked [0, -2, 4] memory blocks, sum=2 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog5aOrF6', '-x'] From python-checkins at python.org Sat Aug 17 11:32:00 2013 From: python-checkins at python.org (raymond.hettinger) Date: Sat, 17 Aug 2013 11:32:00 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Use_a_known_unique_object_?= =?utf-8?q?for_the_dummy_entry=2E?= Message-ID: <3cHGPr73nVz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/2c9a2b588a89 changeset: 85218:2c9a2b588a89 user: Raymond Hettinger date: Sat Aug 17 02:31:53 2013 -0700 summary: Use a known unique object for the dummy entry. This lets us run PyObject_RichCompareBool() without first needing to check whether the entry is a dummy. files: Objects/setobject.c | 45 ++++++++++++++------------------ 1 files changed, 20 insertions(+), 25 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -91,32 +91,27 @@ if (entry->key == NULL || entry->key == key) return entry; - if (entry->key == dummy) - freeslot = entry; - else { - if (entry->hash == hash) { - startkey = entry->key; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp < 0) - return NULL; - if (table == so->table && entry->key == startkey) { - if (cmp > 0) - return entry; - } - else { - /* The compare did major nasty stuff to the - * set: start over. - */ - return set_lookkey(so, key, hash); - } + if (entry->hash == hash) { + startkey = entry->key; + Py_INCREF(startkey); + cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); + Py_DECREF(startkey); + if (cmp < 0) + return NULL; + if (table == so->table && entry->key == startkey) { + if (cmp > 0) + return entry; } - freeslot = NULL; + else { + /* Start over if the compare altered the set */ + return set_lookkey(so, key, hash); + } } - /* In the loop, key == dummy is by far (factor of 100s) the - least likely outcome, so test for that last. */ + freeslot = (entry->key == dummy) ? entry : NULL; + + /* In the loop, key == dummy is by far (factor of 100s) + the least likely outcome, so test for that last. 
*/ for (perturb = hash; ; perturb >>= PERTURB_SHIFT) { i = i * 5 + perturb + 1; entry = &table[i & mask]; @@ -127,7 +122,7 @@ } if (entry->key == key) break; - if (entry->hash == hash && entry->key != dummy) { + if (entry->hash == hash) { startkey = entry->key; Py_INCREF(startkey); cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); @@ -1029,7 +1024,7 @@ PySetObject *so = NULL; if (dummy == NULL) { /* Auto-initialize dummy */ - dummy = PyUnicode_FromString(""); + dummy = _PyObject_New(&PyBaseObject_Type); if (dummy == NULL) return NULL; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 11:39:55 2013 From: python-checkins at python.org (raymond.hettinger) Date: Sat, 17 Aug 2013 11:39:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Remove_the_else-clause_bec?= =?utf-8?q?ause_the_conditions_are_no_longer_mutually_exclusive=2E?= Message-ID: <3cHGZz600czT0n@mail.python.org> http://hg.python.org/cpython/rev/f30b82d380b3 changeset: 85219:f30b82d380b3 user: Raymond Hettinger date: Sat Aug 17 02:39:46 2013 -0700 summary: Remove the else-clause because the conditions are no longer mutually exclusive. files: Objects/setobject.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -140,7 +140,7 @@ return set_lookkey(so, key, hash); } } - else if (entry->key == dummy && freeslot == NULL) + if (entry->key == dummy && freeslot == NULL) freeslot = entry; } return entry; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 14:32:17 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 14:32:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_cp_pep-0247=2Etxt_pep-0452=2E?= =?utf-8?q?txt?= Message-ID: <3cHLPs6Fvdz7LjQ@mail.python.org> http://hg.python.org/peps/rev/49f978a3ebfa changeset: 5061:49f978a3ebfa user: Christian Heimes date: Sat Aug 17 14:28:44 2013 +0200 summary: cp pep-0247.txt pep-0452.txt files: pep-0247.txt | 0 1 files changed, 0 insertions(+), 0 deletions(-) diff --git a/pep-0247.txt b/pep-0452.txt copy from pep-0247.txt copy to pep-0452.txt -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 17 14:32:19 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 14:32:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_whitespace_cleanup?= Message-ID: <3cHLPv0jx1z7Ljk@mail.python.org> http://hg.python.org/peps/rev/cfaf87b53f02 changeset: 5062:cfaf87b53f02 user: Christian Heimes date: Sat Aug 17 14:29:35 2013 +0200 summary: whitespace cleanup files: pep-0452.txt | 22 +++++++++++----------- 1 files changed, 11 insertions(+), 11 deletions(-) diff --git a/pep-0452.txt b/pep-0452.txt --- a/pep-0452.txt +++ b/pep-0452.txt @@ -34,7 +34,7 @@ 'string' parameter, if supplied, will be immediately hashed into the object's starting state, as if obj.update(string) was called. - + After creating a hashing object, arbitrary strings can be fed into the object using its update() method, and the hash value can be obtained at any time by calling the object's digest() @@ -70,7 +70,7 @@ object is created, and this attribute must contain the selected size. Therefore None is *not* a legal value for this attribute. - + Hashing objects require the following methods: @@ -89,19 +89,19 @@ hexdigest() Return the hash value of this hashing object as a string - containing hexadecimal digits. 
Lowercase letters should be used + containing hexadecimal digits. Lowercase letters should be used for the digits 'a' through 'f'. Like the .digest() method, this method mustn't alter the object. - + update(string) Hash 'string' into the current state of the hashing object. update() can be called any number of times during a hashing object's lifetime. - Hashing modules can define additional module-level functions or + Hashing modules can define additional module-level functions or object methods and still be compliant with this specification. - + Here's an example, using a module named 'MD5': >>> from Crypto.Hash import MD5 @@ -110,11 +110,11 @@ 16 >>> m.update('abc') >>> m.digest() - '\x90\x01P\x98<\xd2O\xb0\xd6\x96?}(\xe1\x7fr' + '\x90\x01P\x98<\xd2O\xb0\xd6\x96?}(\xe1\x7fr' >>> m.hexdigest() - '900150983cd24fb0d6963f7d28e17f72' + '900150983cd24fb0d6963f7d28e17f72' >>> MD5.new('abc').digest() - '\x90\x01P\x98<\xd2O\xb0\xd6\x96?}(\xe1\x7fr' + '\x90\x01P\x98<\xd2O\xb0\xd6\x96?}(\xe1\x7fr' Rationale @@ -140,8 +140,8 @@ to place required parameters first, but that also means that the 'string' parameter moves from the first position to the second. It would be possible to get confused and pass a single argument to - a keyed hash, thinking that you're passing an initial string to an - unkeyed hash, but it doesn't seem worth making the interface + a keyed hash, thinking that you're passing an initial string to an + unkeyed hash, but it doesn't seem worth making the interface for keyed hashes more obscure to avoid this potential error. -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 17 14:32:20 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 14:32:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_Add_first_draft_of_=27API_for?= =?utf-8?q?_Cryptographic_Hash_Functions_v2=2E0=27?= Message-ID: <3cHLPw3Tn8z7Ljt@mail.python.org> http://hg.python.org/peps/rev/938afd2f47f2 changeset: 5063:938afd2f47f2 user: Christian Heimes date: Sat Aug 17 14:30:44 2013 +0200 summary: Add first draft of 'API for Cryptographic Hash Functions v2.0' files: pep-0452.txt | 110 ++++++++++++++++++++++++++++++++------ 1 files changed, 93 insertions(+), 17 deletions(-) diff --git a/pep-0452.txt b/pep-0452.txt --- a/pep-0452.txt +++ b/pep-0452.txt @@ -1,12 +1,13 @@ -PEP: 247 -Title: API for Cryptographic Hash Functions +PEP: 452 +Title: API for Cryptographic Hash Functions v2.0 Version: $Revision$ Last-Modified: $Date$ -Author: A.M. Kuchling -Status: Final +Author: A.M. Kuchling , Christian Heimes +Status: Draft Type: Informational -Created: 23-Mar-2001 -Post-History: 20-Sep-2001 +Created: 15-Aug-2013 +Post-History: +Replaces: 247 Abstract @@ -35,11 +36,19 @@ into the object's starting state, as if obj.update(string) was called. - After creating a hashing object, arbitrary strings can be fed + After creating a hashing object, arbitrary bytes can be fed into the object using its update() method, and the hash value can be obtained at any time by calling the object's digest() method. + Although the parameter is called 'string', hashing objects operate + on 8-bit data only. Both 'key' and 'string' must be a bytes-like + object (bytes, bytearray...). A hashing object may support + one-dimensional, contiguous buffers as argument, too. Text + (unicode) is no longer supported in Python 3.x. Python 2.x + implementations may take ASCII-only unicode as argument, but + portable code should not rely on the feature. 
+ Arbitrary additional keyword arguments can be added to this function, but if they're not supplied, sensible default values should be used. For example, 'rounds' and 'digest_size' @@ -59,7 +68,7 @@ Hashes with a variable output size will set this variable to None. - Hashing objects require a single attribute: + Hashing objects require the following attribute: digest_size @@ -71,6 +80,20 @@ selected size. Therefore None is *not* a legal value for this attribute. + block_size + + An integer value or ``NotImplemented``; the internal block size + of the hash algorithm in bytes. The block size is used by the + HMAC module to pad the secret key to digest_size or to hash the + secret key if it is longer than digest_size. If no HMAC + algorithm is standardized for the the hash algorithm, return + ``NotImplemented`` instead. + + name + + A text string value; the canonical, lowercase name of the hashing + algorithm. The name should be a suitable parameter for + :func:`hashlib.new`. Hashing objects require the following methods: @@ -81,7 +104,7 @@ digest() - Return the hash value of this hashing object as a string + Return the hash value of this hashing object as a bytes containing 8-bit data. The object is not altered in any way by this function; you can continue updating the object after calling this function. @@ -95,26 +118,33 @@ update(string) - Hash 'string' into the current state of the hashing object. - update() can be called any number of times during a hashing - object's lifetime. + Hash bytes-like 'string' into the current state of the hashing + object. update() can be called any number of times during a + hashing object's lifetime. Hashing modules can define additional module-level functions or object methods and still be compliant with this specification. Here's an example, using a module named 'MD5': + >>> import hashlib >>> from Crypto.Hash import MD5 >>> m = MD5.new() + >>> isinstance(m, hashlib.CryptoHash) + True + >>> m.name + 'md5' >>> m.digest_size 16 - >>> m.update('abc') + >>> m.block_size + 64 + >>> m.update(b'abc') >>> m.digest() - '\x90\x01P\x98<\xd2O\xb0\xd6\x96?}(\xe1\x7fr' + b'\x90\x01P\x98<\xd2O\xb0\xd6\x96?}(\xe1\x7fr' >>> m.hexdigest() '900150983cd24fb0d6963f7d28e17f72' - >>> MD5.new('abc').digest() - '\x90\x01P\x98<\xd2O\xb0\xd6\x96?}(\xe1\x7fr' + >>> MD5.new(b'abc').digest() + b'\x90\x01P\x98<\xd2O\xb0\xd6\x96?}(\xe1\x7fr' Rationale @@ -145,12 +175,58 @@ for keyed hashes more obscure to avoid this potential error. +Changes from Version 1.0 to Version 2.0 + + Version 2.0 of API for Cryptographic Hash Functions clarifies some + aspects of the API and brings it up-to-date. It also formalized aspects + that were already de-facto standards and provided by most + implementations. + + Version 2.0 introduces the following new attributes: + + name + + The name property was made mandatory by :issue:`18532`. + + block_size + + The new version also specifies that the return value + ``NotImplemented`` prevents HMAC support. + + Version 2.0 takes the separation of binary and text data in Python + 3.0 into account. The 'string' argument to new() and update() as + well as the 'key' argument must be bytes-like objects. On Python + 2.x a hashing object may also support ASCII-only unicode. The actual + name of argument is not changed as it is part of the public API. + Code may depend on the fact that the argument is called 'string'. 
+ + +Recommanded names for common hashing algorithms + + algorithm variant recommended name + ---------- --------- ---------------- + MD5 md5 + RIPEMD-160 ripemd160 + SHA-1 sha1 + SHA-2 SHA-224 sha224 + SHA-256 sha256 + SHA-384 sha384 + SHA-512 sha512 + SHA-3 SHA-3-224 sha3_224 + SHA-3-256 sha3_256 + SHA-3-384 sha3_384 + SHA-3-512 sha3_512 + WHIRLPOOL whirlpool + + Changes 2001-09-17: Renamed clear() to reset(); added digest_size attribute - to objects; added .hexdigest() method. + to objects; added .hexdigest() method. 2001-09-20: Removed reset() method completely. 2001-09-28: Set digest_size to None for variable-size hashes. + 2013-08-15: Added block_size and name attributes; clarified that + 'string' actually referes to bytes-like objects. Acknowledgements -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Aug 17 14:54:30 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 14:54:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogIzE4NzA1OiBmaXgg?= =?utf-8?q?a_number_of_typos=2E__Patch_by_F=C3=A9vry_Thibault=2E?= Message-ID: <3cHLvV5LkGz7LjP@mail.python.org> http://hg.python.org/cpython/rev/d234dd21374a changeset: 85220:d234dd21374a branch: 2.7 parent: 85215:aea67c35e3ce user: Ezio Melotti date: Sat Aug 17 15:43:51 2013 +0300 summary: #18705: fix a number of typos. Patch by F?vry Thibault. files: Lib/Cookie.py | 2 +- Lib/_osx_support.py | 8 ++++---- Lib/_strptime.py | 2 +- Lib/aifc.py | 2 +- Lib/calendar.py | 2 +- Lib/compiler/pyassem.py | 2 +- Lib/difflib.py | 6 +++--- Lib/doctest.py | 4 ++-- Lib/genericpath.py | 2 +- Lib/inspect.py | 2 +- Lib/lib-tk/turtle.py | 8 ++++---- Lib/modulefinder.py | 2 +- Lib/multiprocessing/forking.py | 2 +- Lib/optparse.py | 2 +- Lib/pdb.py | 2 +- Lib/pickletools.py | 4 ++-- Lib/platform.py | 6 +++--- Lib/poplib.py | 2 +- Lib/rlcompleter.py | 2 +- Lib/subprocess.py | 2 +- Lib/tarfile.py | 2 +- Misc/ACKS | 1 + 22 files changed, 34 insertions(+), 33 deletions(-) diff --git a/Lib/Cookie.py b/Lib/Cookie.py --- a/Lib/Cookie.py +++ b/Lib/Cookie.py @@ -238,7 +238,7 @@ # a two-way quoting algorithm. Any non-text character is translated # into a 4 character sequence: a forward-slash followed by the # three-digit octal equivalent of the character. Any '\' or '"' is -# quoted with a preceeding '\' slash. +# quoted with a preceding '\' slash. # # These are taken from RFC2068 and RFC2109. # _LegalChars is the list of chars which don't require "'s diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py --- a/Lib/_osx_support.py +++ b/Lib/_osx_support.py @@ -152,7 +152,7 @@ # are not installed. # # Futhermore, the compiler that can be used varies between - # Xcode releases. Upto Xcode 4 it was possible to use 'gcc-4.2' + # Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2' # as the compiler, after that 'clang' should be used because # gcc-4.2 is either not present, or a copy of 'llvm-gcc' that # miscompiles Python. @@ -192,7 +192,7 @@ if cc != oldcc: # Found a replacement compiler. - # Modify config vars using new compiler, if not already explictly + # Modify config vars using new compiler, if not already explicitly # overriden by an env variable, preserving additional arguments. for cv in _COMPILER_CONFIG_VARS: if cv in _config_vars and cv not in os.environ: @@ -274,7 +274,7 @@ # compile an extension using an SDK that is not present # on the current machine it is better to not use an SDK # than to fail. 
This is particularly important with - # the standalong Command Line Tools alternative to a + # the standalone Command Line Tools alternative to a # full-blown Xcode install since the CLT packages do not # provide SDKs. If the SDK is not present, it is assumed # that the header files and dev libs have been installed @@ -378,7 +378,7 @@ compilers are present, i.e. when installing pure Python dists. Customization of compiler paths and detection of unavailable archs is deferred - until the first extention module build is + until the first extension module build is requested (in distutils.sysconfig.customize_compiler). Currently called from distutils.sysconfig diff --git a/Lib/_strptime.py b/Lib/_strptime.py --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -222,7 +222,7 @@ """Convert a list to a regex string for matching a directive. Want possible matching values to be from longest to shortest. This - prevents the possibility of a match occuring for a value that also + prevents the possibility of a match occurring for a value that also a substring of a larger value that should have matched (e.g., 'abc' matching when 'abcdef' should have been the match). diff --git a/Lib/aifc.py b/Lib/aifc.py --- a/Lib/aifc.py +++ b/Lib/aifc.py @@ -123,7 +123,7 @@ compression type, and then write audio frames using writeframesraw. When all frames have been written, either call writeframes('') or close() to patch up the sizes in the header. -Marks can be added anytime. If there are any marks, ypu must call +Marks can be added anytime. If there are any marks, you must call close() after all frames have been written. The close() method is called automatically when the class instance is destroyed. diff --git a/Lib/calendar.py b/Lib/calendar.py --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -220,7 +220,7 @@ def yeardatescalendar(self, year, width=3): """ Return the data for the specified year ready for formatting. The return - value is a list of month rows. Each month row contains upto width months. + value is a list of month rows. Each month row contains up to width months. Each month contains between 4 and 6 weeks and each week contains 1-7 days. Days are datetime.date objects. """ diff --git a/Lib/compiler/pyassem.py b/Lib/compiler/pyassem.py --- a/Lib/compiler/pyassem.py +++ b/Lib/compiler/pyassem.py @@ -125,7 +125,7 @@ # Make sure every block appears in dominators, even if no # other block must precede it. dominators.setdefault(b, set()) - # preceeding blocks dominate following blocks + # preceding blocks dominate following blocks for c in b.get_followers(): while 1: dominators.setdefault(c, set()).add(b) diff --git a/Lib/difflib.py b/Lib/difflib.py --- a/Lib/difflib.py +++ b/Lib/difflib.py @@ -586,7 +586,7 @@ def get_grouped_opcodes(self, n=3): """ Isolate change clusters by eliminating ranges with no changes. - Return a generator of groups with upto n lines of context. + Return a generator of groups with up to n lines of context. Each group is in the same format as returned by get_opcodes(). 
>>> from pprint import pprint @@ -1361,7 +1361,7 @@ linejunk -- passed on to ndiff (see ndiff documentation) charjunk -- passed on to ndiff (see ndiff documentation) - This function returns an interator which returns a tuple: + This function returns an iterator which returns a tuple: (from line tuple, to line tuple, boolean flag) from/to line tuple -- (line num, line text) @@ -1963,7 +1963,7 @@ self._make_prefix() # change tabs to spaces before it gets more difficult after we insert - # markkup + # markup fromlines,tolines = self._tab_newline_replace(fromlines,tolines) # create diffs iterator which generates side by side from/to data diff --git a/Lib/doctest.py b/Lib/doctest.py --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -424,7 +424,7 @@ zero-based, with respect to the beginning of the DocTest. - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that preceed the + I.e., the number of space characters that precede the example's first prompt. - options: A dictionary mapping from option flags to True or @@ -895,7 +895,7 @@ if '__name__' not in globs: globs['__name__'] = '__main__' # provide a default module name - # Recursively expore `obj`, extracting DocTests. + # Recursively explore `obj`, extracting DocTests. tests = [] self._find(tests, obj, name, module, source_lines, globs, {}) # Sort the tests by alpha order of names, for consistency in diff --git a/Lib/genericpath.py b/Lib/genericpath.py --- a/Lib/genericpath.py +++ b/Lib/genericpath.py @@ -22,7 +22,7 @@ # This follows symbolic links, so both islink() and isdir() can be true -# for the same path ono systems that support symlinks +# for the same path on systems that support symlinks def isfile(path): """Test whether a path is a regular file""" try: diff --git a/Lib/inspect.py b/Lib/inspect.py --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -165,7 +165,7 @@ """Return true if the object is a generator. Generator objects provide these attributes: - __iter__ defined to support interation over container + __iter__ defined to support iteration over container close raises a new GeneratorExit exception inside the generator to terminate the iteration gi_code code object diff --git a/Lib/lib-tk/turtle.py b/Lib/lib-tk/turtle.py --- a/Lib/lib-tk/turtle.py +++ b/Lib/lib-tk/turtle.py @@ -1233,7 +1233,7 @@ self._delayvalue = int(delay) def _incrementudc(self): - """Increment upadate counter.""" + """Increment update counter.""" if not TurtleScreen._RUNNING: TurtleScreen._RUNNNING = True raise Terminator @@ -2439,7 +2439,7 @@ self.screen = TurtleScreen(canvas) RawTurtle.screens.append(self.screen) else: - raise TurtleGraphicsError("bad cavas argument %s" % canvas) + raise TurtleGraphicsError("bad canvas argument %s" % canvas) screen = self.screen TNavigator.__init__(self, screen.mode()) @@ -2684,7 +2684,7 @@ def shapesize(self, stretch_wid=None, stretch_len=None, outline=None): """Set/return turtle's stretchfactors/outline. Set resizemode to "user". - Optinonal arguments: + Optional arguments: stretch_wid : positive number stretch_len : positive number outline : positive number @@ -2975,7 +2975,7 @@ def _goto(self, end): """Move the pen to the point end, thereby drawing a line - if pen is down. All other methodes for turtle movement depend + if pen is down. All other methods for turtle movement depend on this one. 
""" ## Version mit undo-stuff diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py --- a/Lib/modulefinder.py +++ b/Lib/modulefinder.py @@ -516,7 +516,7 @@ # Print modules that may be missing, but then again, maybe not... if maybe: print - print "Submodules thay appear to be missing, but could also be", + print "Submodules that appear to be missing, but could also be", print "global names in the parent package:" for name in maybe: mods = self.badmodules[name].keys() diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py --- a/Lib/multiprocessing/forking.py +++ b/Lib/multiprocessing/forking.py @@ -367,7 +367,7 @@ def main(): ''' - Run code specifed by data received over pipe + Run code specified by data received over pipe ''' assert is_forking(sys.argv) diff --git a/Lib/optparse.py b/Lib/optparse.py --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -1471,7 +1471,7 @@ """_match_long_opt(opt : string) -> string Determine which long option string 'opt' matches, ie. which one - it is an unambiguous abbrevation for. Raises BadOptionError if + it is an unambiguous abbreviation for. Raises BadOptionError if 'opt' doesn't unambiguously match any long option string. """ return _match_abbrev(opt, self._long_opt) diff --git a/Lib/pdb.py b/Lib/pdb.py --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -1095,7 +1095,7 @@ def help_run(self): print """run [args...] Restart the debugged python program. If a string is supplied, it is -splitted with "shlex" and the result is used as the new sys.argv. +split with "shlex" and the result is used as the new sys.argv. History, breakpoints, actions and debugger options are preserved. "restart" is an alias for "run".""" diff --git a/Lib/pickletools.py b/Lib/pickletools.py --- a/Lib/pickletools.py +++ b/Lib/pickletools.py @@ -804,7 +804,7 @@ obtype=StackObject, doc="""An object representing a contiguous slice of the stack. - This is used in conjuction with markobject, to represent all + This is used in conjunction with markobject, to represent all of the stack following the topmost markobject. For example, the POP_MARK opcode changes the stack from @@ -1929,7 +1929,7 @@ stack = [] # crude emulation of unpickler stack if memo is None: - memo = {} # crude emulation of unpicker memo + memo = {} # crude emulation of unpickler memo maxproto = -1 # max protocol number seen markstack = [] # bytecode positions of MARK opcodes indentchunk = ' ' * indentlevel diff --git a/Lib/platform.py b/Lib/platform.py --- a/Lib/platform.py +++ b/Lib/platform.py @@ -228,7 +228,7 @@ return 'OpenLinux',pkg[1],id if os.path.isdir('/usr/lib/setup'): - # Check for slackware verson tag file (thanks to Greg Andruk) + # Check for slackware version tag file (thanks to Greg Andruk) verfiles = os.listdir('/usr/lib/setup') for n in range(len(verfiles)-1, -1, -1): if verfiles[n][:14] != 'slack-version-': @@ -280,7 +280,7 @@ if m is not None: return tuple(m.groups()) - # Unkown format... take the first two words + # Unknown format... take the first two words l = string.split(string.strip(firstline)) if l: version = l[0] @@ -800,7 +800,7 @@ versioninfo, machine) with versioninfo being a tuple (version, dev_stage, non_release_version). - Entries which cannot be determined are set to the paramter values + Entries which cannot be determined are set to the parameter values which default to ''. All tuple entries are strings. 
""" diff --git a/Lib/poplib.py b/Lib/poplib.py --- a/Lib/poplib.py +++ b/Lib/poplib.py @@ -321,7 +321,7 @@ hostname - the hostname of the pop3 over ssl server port - port number - keyfile - PEM formatted file that countains your private key + keyfile - PEM formatted file that contains your private key certfile - PEM formatted certificate chain file See the methods of the parent class POP3 for more documentation. diff --git a/Lib/rlcompleter.py b/Lib/rlcompleter.py --- a/Lib/rlcompleter.py +++ b/Lib/rlcompleter.py @@ -116,7 +116,7 @@ """Compute matches when text contains a dot. Assuming the text is of the form NAME.NAME....[NAME], and is - evaluatable in self.namespace, it will be evaluated and its attributes + evaluable in self.namespace, it will be evaluated and its attributes (as revealed by dir()) are used as possible completions. (For class instances, class members are also considered.) diff --git a/Lib/subprocess.py b/Lib/subprocess.py --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -143,7 +143,7 @@ started to execute, will be re-raised in the parent. Additionally, the exception object will have one extra attribute called 'child_traceback', which is a string containing traceback information -from the childs point of view. +from the child's point of view. The most common exception raised is OSError. This occurs, for example, when trying to execute a non-existent file. Applications diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -330,7 +330,7 @@ """General exception for extract errors.""" pass class ReadError(TarError): - """Exception for unreadble tar archives.""" + """Exception for unreadable tar archives.""" pass class CompressionError(TarError): """Exception for unavailable compression methods.""" diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1015,6 +1015,7 @@ Victor Terr?n Richard M. Tew Tobias Thelen +F?vry Thibault Lowe Thiderman Nicolas M. Thi?ry James Thomas -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 14:54:32 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 14:54:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NzA1OiBmaXgg?= =?utf-8?q?a_number_of_typos=2E__Patch_by_F=C3=A9vry_Thibault=2E?= Message-ID: <3cHLvX2xm7z7Ljd@mail.python.org> http://hg.python.org/cpython/rev/e07f104133d5 changeset: 85221:e07f104133d5 branch: 3.3 parent: 85216:80c475617608 user: Ezio Melotti date: Sat Aug 17 15:50:46 2013 +0300 summary: #18705: fix a number of typos. Patch by F?vry Thibault. 
files: Lib/_osx_support.py | 10 +++++----- Lib/_strptime.py | 2 +- Lib/aifc.py | 2 +- Lib/calendar.py | 2 +- Lib/datetime.py | 2 +- Lib/difflib.py | 6 +++--- Lib/doctest.py | 4 ++-- Lib/genericpath.py | 2 +- Lib/inspect.py | 4 ++-- Lib/ipaddress.py | 4 ++-- Lib/modulefinder.py | 2 +- Lib/multiprocessing/forking.py | 2 +- Lib/optparse.py | 2 +- Lib/pdb.py | 4 ++-- Lib/pickletools.py | 6 +++--- Lib/platform.py | 6 +++--- Lib/poplib.py | 2 +- Lib/rlcompleter.py | 2 +- Lib/shutil.py | 6 +++--- Lib/ssl.py | 2 +- Lib/subprocess.py | 2 +- Lib/tarfile.py | 2 +- Lib/tempfile.py | 2 +- Lib/threading.py | 2 +- Lib/turtle.py | 8 ++++---- 25 files changed, 44 insertions(+), 44 deletions(-) diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py --- a/Lib/_osx_support.py +++ b/Lib/_osx_support.py @@ -53,7 +53,7 @@ def _read_output(commandstring): - """Output from succesful command execution or None""" + """Output from successful command execution or None""" # Similar to os.popen(commandstring, "r").read(), # but without actually using os.popen because that # function is not usable during python bootstrap. @@ -152,7 +152,7 @@ # are not installed. # # Futhermore, the compiler that can be used varies between - # Xcode releases. Upto Xcode 4 it was possible to use 'gcc-4.2' + # Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2' # as the compiler, after that 'clang' should be used because # gcc-4.2 is either not present, or a copy of 'llvm-gcc' that # miscompiles Python. @@ -192,7 +192,7 @@ if cc != oldcc: # Found a replacement compiler. - # Modify config vars using new compiler, if not already explictly + # Modify config vars using new compiler, if not already explicitly # overriden by an env variable, preserving additional arguments. for cv in _COMPILER_CONFIG_VARS: if cv in _config_vars and cv not in os.environ: @@ -274,7 +274,7 @@ # compile an extension using an SDK that is not present # on the current machine it is better to not use an SDK # than to fail. This is particularly important with - # the standalong Command Line Tools alternative to a + # the standalone Command Line Tools alternative to a # full-blown Xcode install since the CLT packages do not # provide SDKs. If the SDK is not present, it is assumed # that the header files and dev libs have been installed @@ -378,7 +378,7 @@ compilers are present, i.e. when installing pure Python dists. Customization of compiler paths and detection of unavailable archs is deferred - until the first extention module build is + until the first extension module build is requested (in distutils.sysconfig.customize_compiler). Currently called from distutils.sysconfig diff --git a/Lib/_strptime.py b/Lib/_strptime.py --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -225,7 +225,7 @@ """Convert a list to a regex string for matching a directive. Want possible matching values to be from longest to shortest. This - prevents the possibility of a match occuring for a value that also + prevents the possibility of a match occurring for a value that also a substring of a larger value that should have matched (e.g., 'abc' matching when 'abcdef' should have been the match). diff --git a/Lib/aifc.py b/Lib/aifc.py --- a/Lib/aifc.py +++ b/Lib/aifc.py @@ -123,7 +123,7 @@ compression type, and then write audio frames using writeframesraw. When all frames have been written, either call writeframes('') or close() to patch up the sizes in the header. -Marks can be added anytime. If there are any marks, ypu must call +Marks can be added anytime. 
If there are any marks, you must call close() after all frames have been written. The close() method is called automatically when the class instance is destroyed. diff --git a/Lib/calendar.py b/Lib/calendar.py --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -220,7 +220,7 @@ def yeardatescalendar(self, year, width=3): """ Return the data for the specified year ready for formatting. The return - value is a list of month rows. Each month row contains upto width months. + value is a list of month rows. Each month row contains up to width months. Each month contains between 4 and 6 weeks and each week contains 1-7 days. Days are datetime.date objects. """ diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -49,7 +49,7 @@ return _DAYS_IN_MONTH[month] def _days_before_month(year, month): - "year, month -> number of days in year preceeding first day of month." + "year, month -> number of days in year preceding first day of month." assert 1 <= month <= 12, 'month must be in 1..12' return _DAYS_BEFORE_MONTH[month] + (month > 2 and _is_leap(year)) diff --git a/Lib/difflib.py b/Lib/difflib.py --- a/Lib/difflib.py +++ b/Lib/difflib.py @@ -589,7 +589,7 @@ def get_grouped_opcodes(self, n=3): """ Isolate change clusters by eliminating ranges with no changes. - Return a generator of groups with upto n lines of context. + Return a generator of groups with up to n lines of context. Each group is in the same format as returned by get_opcodes(). >>> from pprint import pprint @@ -1365,7 +1365,7 @@ linejunk -- passed on to ndiff (see ndiff documentation) charjunk -- passed on to ndiff (see ndiff documentation) - This function returns an interator which returns a tuple: + This function returns an iterator which returns a tuple: (from line tuple, to line tuple, boolean flag) from/to line tuple -- (line num, line text) @@ -1967,7 +1967,7 @@ self._make_prefix() # change tabs to spaces before it gets more difficult after we insert - # markkup + # markup fromlines,tolines = self._tab_newline_replace(fromlines,tolines) # create diffs iterator which generates side by side from/to data diff --git a/Lib/doctest.py b/Lib/doctest.py --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -413,7 +413,7 @@ zero-based, with respect to the beginning of the DocTest. - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that preceed the + I.e., the number of space characters that precede the example's first prompt. - options: A dictionary mapping from option flags to True or @@ -893,7 +893,7 @@ if '__name__' not in globs: globs['__name__'] = '__main__' # provide a default module name - # Recursively expore `obj`, extracting DocTests. + # Recursively explore `obj`, extracting DocTests. 
tests = [] self._find(tests, obj, name, module, source_lines, globs, {}) # Sort the tests by alpha order of names, for consistency in diff --git a/Lib/genericpath.py b/Lib/genericpath.py --- a/Lib/genericpath.py +++ b/Lib/genericpath.py @@ -22,7 +22,7 @@ # This follows symbolic links, so both islink() and isdir() can be true -# for the same path ono systems that support symlinks +# for the same path on systems that support symlinks def isfile(path): """Test whether a path is a regular file""" try: diff --git a/Lib/inspect.py b/Lib/inspect.py --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -48,7 +48,7 @@ # Create constants for the compiler flags in Include/code.h # We try to get them from dis to avoid duplication, but fall -# back to hardcording so the dependency is optional +# back to hardcoding so the dependency is optional try: from dis import COMPILER_FLAG_NAMES as _flag_names except ImportError: @@ -185,7 +185,7 @@ """Return true if the object is a generator. Generator objects provide these attributes: - __iter__ defined to support interation over container + __iter__ defined to support iteration over container close raises a new GeneratorExit exception inside the generator to terminate the iteration gi_code code object diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -727,7 +727,7 @@ minus other. Raises: - TypeError: If self and other are of difffering address + TypeError: If self and other are of differing address versions, or if other is not a network object. ValueError: If other is not completely contained by self. @@ -1378,7 +1378,7 @@ '192.0.2.1' '192.0.2.1/255.255.255.255' '192.0.2.1/32' - are also functionaly equivalent. That is to say, failing to + are also functionally equivalent. That is to say, failing to provide a subnetmask will create an object with a mask of /32. If the mask (portion after the / in the argument) is given in diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py --- a/Lib/modulefinder.py +++ b/Lib/modulefinder.py @@ -509,7 +509,7 @@ # Print modules that may be missing, but then again, maybe not... if maybe: print() - print("Submodules thay appear to be missing, but could also be", end=' ') + print("Submodules that appear to be missing, but could also be", end=' ') print("global names in the parent package:") for name in maybe: mods = sorted(self.badmodules[name].keys()) diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py --- a/Lib/multiprocessing/forking.py +++ b/Lib/multiprocessing/forking.py @@ -337,7 +337,7 @@ def main(): ''' - Run code specifed by data received over pipe + Run code specified by data received over pipe ''' assert is_forking(sys.argv) diff --git a/Lib/optparse.py b/Lib/optparse.py --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -1463,7 +1463,7 @@ """_match_long_opt(opt : string) -> string Determine which long option string 'opt' matches, ie. which one - it is an unambiguous abbrevation for. Raises BadOptionError if + it is an unambiguous abbreviation for. Raises BadOptionError if 'opt' doesn't unambiguously match any long option string. """ return _match_abbrev(opt, self._long_opt) diff --git a/Lib/pdb.py b/Lib/pdb.py --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -1004,7 +1004,7 @@ def do_run(self, arg): """run [args...] Restart the debugged python program. If a string is supplied - it is splitted with "shlex", and the result is used as the new + it is split with "shlex", and the result is used as the new sys.argv. 
History, breakpoints, actions and debugger options are preserved. "restart" is an alias for "run". """ @@ -1361,7 +1361,7 @@ def do_interact(self, arg): """interact - Start an interative interpreter whose global namespace + Start an interactive interpreter whose global namespace contains all the (global and local) names found in the current scope. """ ns = self.curframe.f_globals.copy() diff --git a/Lib/pickletools.py b/Lib/pickletools.py --- a/Lib/pickletools.py +++ b/Lib/pickletools.py @@ -888,7 +888,7 @@ obtype=StackObject, doc="""An object representing a contiguous slice of the stack. - This is used in conjuction with markobject, to represent all + This is used in conjunction with markobject, to represent all of the stack following the topmost markobject. For example, the POP_MARK opcode changes the stack from @@ -2038,12 +2038,12 @@ stack = [] # crude emulation of unpickler stack if memo is None: - memo = {} # crude emulation of unpicker memo + memo = {} # crude emulation of unpickler memo maxproto = -1 # max protocol number seen markstack = [] # bytecode positions of MARK opcodes indentchunk = ' ' * indentlevel errormsg = None - annocol = annotate # columnt hint for annotations + annocol = annotate # column hint for annotations for opcode, arg, pos in genops(pickle): if pos is not None: print("%5d:" % pos, end=' ', file=out) diff --git a/Lib/platform.py b/Lib/platform.py --- a/Lib/platform.py +++ b/Lib/platform.py @@ -230,7 +230,7 @@ return 'OpenLinux',pkg[1],id if os.path.isdir('/usr/lib/setup'): - # Check for slackware verson tag file (thanks to Greg Andruk) + # Check for slackware version tag file (thanks to Greg Andruk) verfiles = os.listdir('/usr/lib/setup') for n in range(len(verfiles)-1, -1, -1): if verfiles[n][:14] != 'slack-version-': @@ -282,7 +282,7 @@ if m is not None: return tuple(m.groups()) - # Unkown format... take the first two words + # Unknown format... take the first two words l = firstline.strip().split() if l: version = l[0] @@ -720,7 +720,7 @@ versioninfo, machine) with versioninfo being a tuple (version, dev_stage, non_release_version). - Entries which cannot be determined are set to the paramter values + Entries which cannot be determined are set to the parameter values which default to ''. All tuple entries are strings. """ diff --git a/Lib/poplib.py b/Lib/poplib.py --- a/Lib/poplib.py +++ b/Lib/poplib.py @@ -328,7 +328,7 @@ hostname - the hostname of the pop3 over ssl server port - port number - keyfile - PEM formatted file that countains your private key + keyfile - PEM formatted file that contains your private key certfile - PEM formatted certificate chain file See the methods of the parent class POP3 for more documentation. diff --git a/Lib/rlcompleter.py b/Lib/rlcompleter.py --- a/Lib/rlcompleter.py +++ b/Lib/rlcompleter.py @@ -110,7 +110,7 @@ """Compute matches when text contains a dot. Assuming the text is of the form NAME.NAME....[NAME], and is - evaluatable in self.namespace, it will be evaluated and its attributes + evaluable in self.namespace, it will be evaluated and its attributes (as revealed by dir()) are used as possible completions. (For class instances, class members are also considered.) 
diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -53,7 +53,7 @@ """Raised when an archive cannot be read""" class RegistryError(Exception): - """Raised when a registery operation with the archiving + """Raised when a registry operation with the archiving and unpacking registeries fails""" @@ -195,7 +195,7 @@ # * follow_symlinks=False, # * lchown() is unavailable, and # * either - # * fchownat() is unvailable or + # * fchownat() is unavailable or # * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW. # (it returned ENOSUP.) # therefore we're out of options--we simply cannot chown the @@ -984,7 +984,7 @@ def disk_usage(path): """Return disk usage statistics about the given path. - Returned valus is a named tuple with attributes 'total', 'used' and + Returned values is a named tuple with attributes 'total', 'used' and 'free', which are the amount of total, used and free space, in bytes. """ total, free = nt._getdiskusage(path) diff --git a/Lib/ssl.py b/Lib/ssl.py --- a/Lib/ssl.py +++ b/Lib/ssl.py @@ -134,7 +134,7 @@ for frag in dn.split(r'.'): if frag.count('*') > max_wildcards: # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survery of established + # than one wildcard per fragment. A survey of established # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( diff --git a/Lib/subprocess.py b/Lib/subprocess.py --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -185,7 +185,7 @@ started to execute, will be re-raised in the parent. Additionally, the exception object will have one extra attribute called 'child_traceback', which is a string containing traceback information -from the childs point of view. +from the child's point of view. The most common exception raised is OSError. This occurs, for example, when trying to execute a non-existent file. Applications diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -289,7 +289,7 @@ """General exception for extract errors.""" pass class ReadError(TarError): - """Exception for unreadble tar archives.""" + """Exception for unreadable tar archives.""" pass class CompressionError(TarError): """Exception for unavailable compression methods.""" diff --git a/Lib/tempfile.py b/Lib/tempfile.py --- a/Lib/tempfile.py +++ b/Lib/tempfile.py @@ -634,7 +634,7 @@ with TemporaryDirectory() as tmpdir: ... - Upon exiting the context, the directory and everthing contained + Upon exiting the context, the directory and everything contained in it are removed. """ diff --git a/Lib/threading.py b/Lib/threading.py --- a/Lib/threading.py +++ b/Lib/threading.py @@ -14,7 +14,7 @@ # Note regarding PEP 8 compliant names # This threading model was originally inspired by Java, and inherited # the convention of camelCase function and method names from that -# language. Those originaly names are not in any imminent danger of +# language. 
Those original names are not in any imminent danger of # being deprecated (even for Py3k),so this module provides them as an # alias for the PEP 8 compliant names # Note that using the new PEP 8 compliant names facilitates substitution diff --git a/Lib/turtle.py b/Lib/turtle.py --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -1278,7 +1278,7 @@ self._delayvalue = int(delay) def _incrementudc(self): - """Increment upadate counter.""" + """Increment update counter.""" if not TurtleScreen._RUNNING: TurtleScreen._RUNNNING = True raise Terminator @@ -2527,7 +2527,7 @@ self.screen = TurtleScreen(canvas) RawTurtle.screens.append(self.screen) else: - raise TurtleGraphicsError("bad cavas argument %s" % canvas) + raise TurtleGraphicsError("bad canvas argument %s" % canvas) screen = self.screen TNavigator.__init__(self, screen.mode()) @@ -2772,7 +2772,7 @@ def shapesize(self, stretch_wid=None, stretch_len=None, outline=None): """Set/return turtle's stretchfactors/outline. Set resizemode to "user". - Optinonal arguments: + Optional arguments: stretch_wid : positive number stretch_len : positive number outline : positive number @@ -3135,7 +3135,7 @@ def _goto(self, end): """Move the pen to the point end, thereby drawing a line - if pen is down. All other methodes for turtle movement depend + if pen is down. All other methods for turtle movement depend on this one. """ ## Version with undo-stuff -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 14:54:34 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 14:54:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NzA1OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cHLvZ0hrWz7Ljy@mail.python.org> http://hg.python.org/cpython/rev/a9ee869cae40 changeset: 85222:a9ee869cae40 parent: 85219:f30b82d380b3 parent: 85221:e07f104133d5 user: Ezio Melotti date: Sat Aug 17 15:53:55 2013 +0300 summary: #18705: merge with 3.3. files: Lib/_osx_support.py | 10 +++++----- Lib/_strptime.py | 2 +- Lib/aifc.py | 2 +- Lib/calendar.py | 2 +- Lib/contextlib.py | 2 +- Lib/datetime.py | 2 +- Lib/difflib.py | 6 +++--- Lib/doctest.py | 4 ++-- Lib/enum.py | 6 +++--- Lib/genericpath.py | 2 +- Lib/inspect.py | 4 ++-- Lib/ipaddress.py | 4 ++-- Lib/modulefinder.py | 2 +- Lib/optparse.py | 2 +- Lib/pdb.py | 4 ++-- Lib/pickletools.py | 6 +++--- Lib/platform.py | 8 ++++---- Lib/poplib.py | 4 ++-- Lib/rlcompleter.py | 2 +- Lib/shutil.py | 6 +++--- Lib/ssl.py | 2 +- Lib/subprocess.py | 2 +- Lib/tarfile.py | 2 +- Lib/tempfile.py | 2 +- Lib/threading.py | 2 +- Lib/turtle.py | 8 ++++---- 26 files changed, 49 insertions(+), 49 deletions(-) diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py --- a/Lib/_osx_support.py +++ b/Lib/_osx_support.py @@ -53,7 +53,7 @@ def _read_output(commandstring): - """Output from succesful command execution or None""" + """Output from successful command execution or None""" # Similar to os.popen(commandstring, "r").read(), # but without actually using os.popen because that # function is not usable during python bootstrap. @@ -152,7 +152,7 @@ # are not installed. # # Futhermore, the compiler that can be used varies between - # Xcode releases. Upto Xcode 4 it was possible to use 'gcc-4.2' + # Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2' # as the compiler, after that 'clang' should be used because # gcc-4.2 is either not present, or a copy of 'llvm-gcc' that # miscompiles Python. 
@@ -192,7 +192,7 @@ if cc != oldcc: # Found a replacement compiler. - # Modify config vars using new compiler, if not already explictly + # Modify config vars using new compiler, if not already explicitly # overriden by an env variable, preserving additional arguments. for cv in _COMPILER_CONFIG_VARS: if cv in _config_vars and cv not in os.environ: @@ -274,7 +274,7 @@ # compile an extension using an SDK that is not present # on the current machine it is better to not use an SDK # than to fail. This is particularly important with - # the standalong Command Line Tools alternative to a + # the standalone Command Line Tools alternative to a # full-blown Xcode install since the CLT packages do not # provide SDKs. If the SDK is not present, it is assumed # that the header files and dev libs have been installed @@ -378,7 +378,7 @@ compilers are present, i.e. when installing pure Python dists. Customization of compiler paths and detection of unavailable archs is deferred - until the first extention module build is + until the first extension module build is requested (in distutils.sysconfig.customize_compiler). Currently called from distutils.sysconfig diff --git a/Lib/_strptime.py b/Lib/_strptime.py --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -225,7 +225,7 @@ """Convert a list to a regex string for matching a directive. Want possible matching values to be from longest to shortest. This - prevents the possibility of a match occuring for a value that also + prevents the possibility of a match occurring for a value that also a substring of a larger value that should have matched (e.g., 'abc' matching when 'abcdef' should have been the match). diff --git a/Lib/aifc.py b/Lib/aifc.py --- a/Lib/aifc.py +++ b/Lib/aifc.py @@ -123,7 +123,7 @@ compression type, and then write audio frames using writeframesraw. When all frames have been written, either call writeframes('') or close() to patch up the sizes in the header. -Marks can be added anytime. If there are any marks, ypu must call +Marks can be added anytime. If there are any marks, you must call close() after all frames have been written. The close() method is called automatically when the class instance is destroyed. diff --git a/Lib/calendar.py b/Lib/calendar.py --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -220,7 +220,7 @@ def yeardatescalendar(self, year, width=3): """ Return the data for the specified year ready for formatting. The return - value is a list of month rows. Each month row contains upto width months. + value is a list of month rows. Each month row contains up to width months. Each month contains between 4 and 6 weeks and each week contains 1-7 days. Days are datetime.date objects. """ diff --git a/Lib/contextlib.py b/Lib/contextlib.py --- a/Lib/contextlib.py +++ b/Lib/contextlib.py @@ -142,7 +142,7 @@ @contextmanager def ignored(*exceptions): - """Context manager to ignore specifed exceptions + """Context manager to ignore specified exceptions with ignored(OSError): os.remove(somefile) diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -49,7 +49,7 @@ return _DAYS_IN_MONTH[month] def _days_before_month(year, month): - "year, month -> number of days in year preceeding first day of month." + "year, month -> number of days in year preceding first day of month." 
assert 1 <= month <= 12, 'month must be in 1..12' return _DAYS_BEFORE_MONTH[month] + (month > 2 and _is_leap(year)) diff --git a/Lib/difflib.py b/Lib/difflib.py --- a/Lib/difflib.py +++ b/Lib/difflib.py @@ -575,7 +575,7 @@ def get_grouped_opcodes(self, n=3): """ Isolate change clusters by eliminating ranges with no changes. - Return a generator of groups with upto n lines of context. + Return a generator of groups with up to n lines of context. Each group is in the same format as returned by get_opcodes(). >>> from pprint import pprint @@ -1344,7 +1344,7 @@ linejunk -- passed on to ndiff (see ndiff documentation) charjunk -- passed on to ndiff (see ndiff documentation) - This function returns an interator which returns a tuple: + This function returns an iterator which returns a tuple: (from line tuple, to line tuple, boolean flag) from/to line tuple -- (line num, line text) @@ -1946,7 +1946,7 @@ self._make_prefix() # change tabs to spaces before it gets more difficult after we insert - # markkup + # markup fromlines,tolines = self._tab_newline_replace(fromlines,tolines) # create diffs iterator which generates side by side from/to data diff --git a/Lib/doctest.py b/Lib/doctest.py --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -417,7 +417,7 @@ zero-based, with respect to the beginning of the DocTest. - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that preceed the + I.e., the number of space characters that precede the example's first prompt. - options: A dictionary mapping from option flags to True or @@ -897,7 +897,7 @@ if '__name__' not in globs: globs['__name__'] = '__main__' # provide a default module name - # Recursively expore `obj`, extracting DocTests. + # Recursively explore `obj`, extracting DocTests. tests = [] self._find(tests, obj, name, module, source_lines, globs, {}) # Sort the tests by alpha order of names, for consistency in diff --git a/Lib/enum.py b/Lib/enum.py --- a/Lib/enum.py +++ b/Lib/enum.py @@ -90,9 +90,9 @@ super().__setitem__(key, value) -# Dummy value for Enum as EnumMeta explicity checks for it, but of course until -# EnumMeta finishes running the first time the Enum class doesn't exist. This -# is also why there are checks in EnumMeta like `if Enum is not None` +# Dummy value for Enum as EnumMeta explicitly checks for it, but of course +# until EnumMeta finishes running the first time the Enum class doesn't exist. +# This is also why there are checks in EnumMeta like `if Enum is not None` Enum = None diff --git a/Lib/genericpath.py b/Lib/genericpath.py --- a/Lib/genericpath.py +++ b/Lib/genericpath.py @@ -23,7 +23,7 @@ # This follows symbolic links, so both islink() and isdir() can be true -# for the same path ono systems that support symlinks +# for the same path on systems that support symlinks def isfile(path): """Test whether a path is a regular file""" try: diff --git a/Lib/inspect.py b/Lib/inspect.py --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -47,7 +47,7 @@ # Create constants for the compiler flags in Include/code.h # We try to get them from dis to avoid duplication, but fall -# back to hardcording so the dependency is optional +# back to hardcoding so the dependency is optional try: from dis import COMPILER_FLAG_NAMES as _flag_names except ImportError: @@ -184,7 +184,7 @@ """Return true if the object is a generator. 
Generator objects provide these attributes: - __iter__ defined to support interation over container + __iter__ defined to support iteration over container close raises a new GeneratorExit exception inside the generator to terminate the iteration gi_code code object diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -727,7 +727,7 @@ minus other. Raises: - TypeError: If self and other are of difffering address + TypeError: If self and other are of differing address versions, or if other is not a network object. ValueError: If other is not completely contained by self. @@ -1378,7 +1378,7 @@ '192.0.2.1' '192.0.2.1/255.255.255.255' '192.0.2.1/32' - are also functionaly equivalent. That is to say, failing to + are also functionally equivalent. That is to say, failing to provide a subnetmask will create an object with a mask of /32. If the mask (portion after the / in the argument) is given in diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py --- a/Lib/modulefinder.py +++ b/Lib/modulefinder.py @@ -512,7 +512,7 @@ # Print modules that may be missing, but then again, maybe not... if maybe: print() - print("Submodules thay appear to be missing, but could also be", end=' ') + print("Submodules that appear to be missing, but could also be", end=' ') print("global names in the parent package:") for name in maybe: mods = sorted(self.badmodules[name].keys()) diff --git a/Lib/optparse.py b/Lib/optparse.py --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -1463,7 +1463,7 @@ """_match_long_opt(opt : string) -> string Determine which long option string 'opt' matches, ie. which one - it is an unambiguous abbrevation for. Raises BadOptionError if + it is an unambiguous abbreviation for. Raises BadOptionError if 'opt' doesn't unambiguously match any long option string. """ return _match_abbrev(opt, self._long_opt) diff --git a/Lib/pdb.py b/Lib/pdb.py --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -1004,7 +1004,7 @@ def do_run(self, arg): """run [args...] Restart the debugged python program. If a string is supplied - it is splitted with "shlex", and the result is used as the new + it is split with "shlex", and the result is used as the new sys.argv. History, breakpoints, actions and debugger options are preserved. "restart" is an alias for "run". """ @@ -1361,7 +1361,7 @@ def do_interact(self, arg): """interact - Start an interative interpreter whose global namespace + Start an interactive interpreter whose global namespace contains all the (global and local) names found in the current scope. """ ns = self.curframe.f_globals.copy() diff --git a/Lib/pickletools.py b/Lib/pickletools.py --- a/Lib/pickletools.py +++ b/Lib/pickletools.py @@ -887,7 +887,7 @@ obtype=StackObject, doc="""An object representing a contiguous slice of the stack. - This is used in conjuction with markobject, to represent all + This is used in conjunction with markobject, to represent all of the stack following the topmost markobject. 
For example, the POP_MARK opcode changes the stack from @@ -2037,12 +2037,12 @@ stack = [] # crude emulation of unpickler stack if memo is None: - memo = {} # crude emulation of unpicker memo + memo = {} # crude emulation of unpickler memo maxproto = -1 # max protocol number seen markstack = [] # bytecode positions of MARK opcodes indentchunk = ' ' * indentlevel errormsg = None - annocol = annotate # columnt hint for annotations + annocol = annotate # column hint for annotations for opcode, arg, pos in genops(pickle): if pos is not None: print("%5d:" % pos, end=' ', file=out) diff --git a/Lib/platform.py b/Lib/platform.py --- a/Lib/platform.py +++ b/Lib/platform.py @@ -230,7 +230,7 @@ return 'OpenLinux',pkg[1],id if os.path.isdir('/usr/lib/setup'): - # Check for slackware verson tag file (thanks to Greg Andruk) + # Check for slackware version tag file (thanks to Greg Andruk) verfiles = os.listdir('/usr/lib/setup') for n in range(len(verfiles)-1, -1, -1): if verfiles[n][:14] != 'slack-version-': @@ -282,7 +282,7 @@ if m is not None: return tuple(m.groups()) - # Unkown format... take the first two words + # Unknown format... take the first two words l = firstline.strip().split() if l: version = l[0] @@ -649,7 +649,7 @@ versioninfo=('', '', '') machine = os.uname().machine if machine in ('ppc', 'Power Macintosh'): - # Cannonical name + # Canonical name machine = 'PowerPC' return release,versioninfo,machine @@ -661,7 +661,7 @@ versioninfo, machine) with versioninfo being a tuple (version, dev_stage, non_release_version). - Entries which cannot be determined are set to the paramter values + Entries which cannot be determined are set to the parameter values which default to ''. All tuple entries are strings. """ diff --git a/Lib/poplib.py b/Lib/poplib.py --- a/Lib/poplib.py +++ b/Lib/poplib.py @@ -345,7 +345,7 @@ >>> Really, according to RFC 2449, the cyrus folks should avoid - having the implementation splitted into multiple arguments... + having the implementation split into multiple arguments... """ def _parsecap(line): lst = line.decode('ascii').split() @@ -395,7 +395,7 @@ hostname - the hostname of the pop3 over ssl server port - port number - keyfile - PEM formatted file that countains your private key + keyfile - PEM formatted file that contains your private key certfile - PEM formatted certificate chain file context - a ssl.SSLContext diff --git a/Lib/rlcompleter.py b/Lib/rlcompleter.py --- a/Lib/rlcompleter.py +++ b/Lib/rlcompleter.py @@ -111,7 +111,7 @@ """Compute matches when text contains a dot. Assuming the text is of the form NAME.NAME....[NAME], and is - evaluatable in self.namespace, it will be evaluated and its attributes + evaluable in self.namespace, it will be evaluated and its attributes (as revealed by dir()) are used as possible completions. (For class instances, class members are also considered.) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -56,7 +56,7 @@ """Raised when an archive cannot be read""" class RegistryError(Exception): - """Raised when a registery operation with the archiving + """Raised when a registry operation with the archiving and unpacking registeries fails""" @@ -193,7 +193,7 @@ # * follow_symlinks=False, # * lchown() is unavailable, and # * either - # * fchownat() is unvailable or + # * fchownat() is unavailable or # * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW. # (it returned ENOSUP.) 
# therefore we're out of options--we simply cannot chown the @@ -983,7 +983,7 @@ def disk_usage(path): """Return disk usage statistics about the given path. - Returned valus is a named tuple with attributes 'total', 'used' and + Returned values is a named tuple with attributes 'total', 'used' and 'free', which are the amount of total, used and free space, in bytes. """ total, free = nt._getdiskusage(path) diff --git a/Lib/ssl.py b/Lib/ssl.py --- a/Lib/ssl.py +++ b/Lib/ssl.py @@ -171,7 +171,7 @@ for frag in dn.split(r'.'): if frag.count('*') > max_wildcards: # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survery of established + # than one wildcard per fragment. A survey of established # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( diff --git a/Lib/subprocess.py b/Lib/subprocess.py --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -188,7 +188,7 @@ started to execute, will be re-raised in the parent. Additionally, the exception object will have one extra attribute called 'child_traceback', which is a string containing traceback information -from the childs point of view. +from the child's point of view. The most common exception raised is OSError. This occurs, for example, when trying to execute a non-existent file. Applications diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -289,7 +289,7 @@ """General exception for extract errors.""" pass class ReadError(TarError): - """Exception for unreadble tar archives.""" + """Exception for unreadable tar archives.""" pass class CompressionError(TarError): """Exception for unavailable compression methods.""" diff --git a/Lib/tempfile.py b/Lib/tempfile.py --- a/Lib/tempfile.py +++ b/Lib/tempfile.py @@ -636,7 +636,7 @@ with TemporaryDirectory() as tmpdir: ... - Upon exiting the context, the directory and everthing contained + Upon exiting the context, the directory and everything contained in it are removed. """ diff --git a/Lib/threading.py b/Lib/threading.py --- a/Lib/threading.py +++ b/Lib/threading.py @@ -19,7 +19,7 @@ # Note regarding PEP 8 compliant names # This threading model was originally inspired by Java, and inherited # the convention of camelCase function and method names from that -# language. Those originaly names are not in any imminent danger of +# language. Those original names are not in any imminent danger of # being deprecated (even for Py3k),so this module provides them as an # alias for the PEP 8 compliant names # Note that using the new PEP 8 compliant names facilitates substitution diff --git a/Lib/turtle.py b/Lib/turtle.py --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -1278,7 +1278,7 @@ self._delayvalue = int(delay) def _incrementudc(self): - """Increment upadate counter.""" + """Increment update counter.""" if not TurtleScreen._RUNNING: TurtleScreen._RUNNNING = True raise Terminator @@ -2527,7 +2527,7 @@ self.screen = TurtleScreen(canvas) RawTurtle.screens.append(self.screen) else: - raise TurtleGraphicsError("bad cavas argument %s" % canvas) + raise TurtleGraphicsError("bad canvas argument %s" % canvas) screen = self.screen TNavigator.__init__(self, screen.mode()) @@ -2772,7 +2772,7 @@ def shapesize(self, stretch_wid=None, stretch_len=None, outline=None): """Set/return turtle's stretchfactors/outline. Set resizemode to "user". 
- Optinonal arguments: + Optional arguments: stretch_wid : positive number stretch_len : positive number outline : positive number @@ -3135,7 +3135,7 @@ def _goto(self, end): """Move the pen to the point end, thereby drawing a line - if pen is down. All other methodes for turtle movement depend + if pen is down. All other methods for turtle movement depend on this one. """ ## Version with undo-stuff -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:03:11 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 15:03:11 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4MTc4?= =?utf-8?q?=3A_Fix_ctypes_on_BSD=2E_dlmalloc=2Ec_was_compiled_twice_which_?= =?utf-8?q?broke?= Message-ID: <3cHM5W55t3zR65@mail.python.org> http://hg.python.org/cpython/rev/f09ca52747a6 changeset: 85223:f09ca52747a6 branch: 3.3 parent: 85221:e07f104133d5 user: Christian Heimes date: Sat Aug 17 15:01:54 2013 +0200 summary: Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. files: Misc/NEWS | 3 +++ Modules/_ctypes/libffi/fficonfig.py.in | 1 - 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke + malloc weak symbols. + - Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes inside subjectAltName correctly. Formerly the module has used OpenSSL's GENERAL_NAME_print() function to get the string represention of ASN.1 diff --git a/Modules/_ctypes/libffi/fficonfig.py.in b/Modules/_ctypes/libffi/fficonfig.py.in --- a/Modules/_ctypes/libffi/fficonfig.py.in +++ b/Modules/_ctypes/libffi/fficonfig.py.in @@ -1,7 +1,6 @@ ffi_sources = """ src/prep_cif.c src/closures.c -src/dlmalloc.c """.split() ffi_platforms = { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:03:12 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 15:03:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318178=3A_Fix_ctypes_on_BSD=2E_dlmalloc=2Ec_was_?= =?utf-8?q?compiled_twice_which_broke?= Message-ID: <3cHM5X71kLz7Ljk@mail.python.org> http://hg.python.org/cpython/rev/bea2f12e899e changeset: 85224:bea2f12e899e parent: 85222:a9ee869cae40 parent: 85223:f09ca52747a6 user: Christian Heimes date: Sat Aug 17 15:02:05 2013 +0200 summary: Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. files: Misc/NEWS | 3 +++ Modules/_ctypes/libffi/fficonfig.py.in | 1 - 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,9 @@ Library ------- +- Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke + malloc weak symbols. + - Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes inside subjectAltName correctly. 
Formerly the module has used OpenSSL's GENERAL_NAME_print() function to get the string represention of ASN.1 diff --git a/Modules/_ctypes/libffi/fficonfig.py.in b/Modules/_ctypes/libffi/fficonfig.py.in --- a/Modules/_ctypes/libffi/fficonfig.py.in +++ b/Modules/_ctypes/libffi/fficonfig.py.in @@ -1,7 +1,6 @@ ffi_sources = """ src/prep_cif.c src/closures.c -src/dlmalloc.c """.split() ffi_platforms = { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:03:14 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 15:03:14 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4MTc4?= =?utf-8?q?=3A_Fix_ctypes_on_BSD=2E_dlmalloc=2Ec_was_compiled_twice_which_?= =?utf-8?q?broke?= Message-ID: <3cHM5Z2Sgxz7LjV@mail.python.org> http://hg.python.org/cpython/rev/d4ac6eee7061 changeset: 85225:d4ac6eee7061 branch: 2.7 parent: 85214:bd2360476bdb user: Christian Heimes date: Sat Aug 17 15:01:54 2013 +0200 summary: Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. files: Misc/NEWS | 3 +++ Modules/_ctypes/libffi/fficonfig.py.in | 1 - 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,9 @@ Library ------- +- Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke + malloc weak symbols. + - Issue #18709: Fix CVE-2013-4238. The SSL module now handles NULL bytes inside subjectAltName correctly. Formerly the module has used OpenSSL's GENERAL_NAME_print() function to get the string represention of ASN.1 diff --git a/Modules/_ctypes/libffi/fficonfig.py.in b/Modules/_ctypes/libffi/fficonfig.py.in --- a/Modules/_ctypes/libffi/fficonfig.py.in +++ b/Modules/_ctypes/libffi/fficonfig.py.in @@ -1,7 +1,6 @@ ffi_sources = """ src/prep_cif.c src/closures.c -src/dlmalloc.c """.split() ffi_platforms = { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:03:16 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 15:03:16 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMi43IC0+IDIuNyk6?= =?utf-8?q?_merge_2=2E7?= Message-ID: <3cHM5c17zSz7LjP@mail.python.org> http://hg.python.org/cpython/rev/c048b211f634 changeset: 85226:c048b211f634 branch: 2.7 parent: 85225:d4ac6eee7061 parent: 85220:d234dd21374a user: Christian Heimes date: Sat Aug 17 15:02:55 2013 +0200 summary: merge 2.7 files: Doc/howto/logging-cookbook.rst | 55 ++++++++-------- Doc/howto/logging.rst | 64 ++++++++++--------- Doc/library/logging.config.rst | 14 ++- Doc/library/logging.handlers.rst | 14 ++- Lib/Cookie.py | 2 +- Lib/_osx_support.py | 8 +- Lib/_strptime.py | 2 +- Lib/aifc.py | 2 +- Lib/calendar.py | 2 +- Lib/compiler/pyassem.py | 2 +- Lib/difflib.py | 6 +- Lib/doctest.py | 4 +- Lib/genericpath.py | 2 +- Lib/inspect.py | 2 +- Lib/lib-tk/turtle.py | 8 +- Lib/modulefinder.py | 2 +- Lib/multiprocessing/forking.py | 2 +- Lib/optparse.py | 2 +- Lib/pdb.py | 2 +- Lib/pickletools.py | 4 +- Lib/platform.py | 6 +- Lib/poplib.py | 2 +- Lib/rlcompleter.py | 2 +- Lib/subprocess.py | 2 +- Lib/tarfile.py | 2 +- Misc/ACKS | 1 + 26 files changed, 112 insertions(+), 102 deletions(-) diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -97,11 +97,11 @@ Multiple handlers and formatters -------------------------------- -Loggers are plain Python 
objects. The :func:`addHandler` method has no minimum -or maximum quota for the number of handlers you may add. Sometimes it will be -beneficial for an application to log all messages of all severities to a text -file while simultaneously logging errors or above to the console. To set this -up, simply configure the appropriate handlers. The logging calls in the +Loggers are plain Python objects. The :meth:`~Logger.addHandler` method has no +minimum or maximum quota for the number of handlers you may add. Sometimes it +will be beneficial for an application to log all messages of all severities to a +text file while simultaneously logging errors or above to the console. To set +this up, simply configure the appropriate handlers. The logging calls in the application code will remain unchanged. Here is a slight modification to the previous simple module-based configuration example:: @@ -395,8 +395,9 @@ Note that there are some security issues with pickle in some scenarios. If these affect you, you can use an alternative serialization scheme by overriding -the :meth:`makePickle` method and implementing your alternative there, as -well as adapting the above script to use your alternative serialization. +the :meth:`~handlers.SocketHandler.makePickle` method and implementing your +alternative there, as well as adapting the above script to use your alternative +serialization. .. _context-info: @@ -404,6 +405,8 @@ Adding contextual information to your logging output ---------------------------------------------------- +.. currentmodule:: logging + Sometimes you want logging output to contain contextual information in addition to the parameters passed to the logging call. For example, in a networked application, it may be desirable to log client-specific information @@ -445,9 +448,9 @@ msg, kwargs = self.process(msg, kwargs) self.logger.debug(msg, *args, **kwargs) -The :meth:`process` method of :class:`LoggerAdapter` is where the contextual -information is added to the logging output. It's passed the message and -keyword arguments of the logging call, and it passes back (potentially) +The :meth:`~LoggerAdapter.process` method of :class:`LoggerAdapter` is where the +contextual information is added to the logging output. It's passed the message +and keyword arguments of the logging call, and it passes back (potentially) modified versions of these to use in the call to the underlying logger. The default implementation of this method leaves the message alone, but inserts an 'extra' key in the keyword argument whose value is the dict-like object @@ -459,8 +462,8 @@ customized strings with your :class:`Formatter` instances which know about the keys of the dict-like object. If you need a different method, e.g. if you want to prepend or append the contextual information to the message string, -you just need to subclass :class:`LoggerAdapter` and override :meth:`process` -to do what you need. Here is a simple example:: +you just need to subclass :class:`LoggerAdapter` and override +:meth:`~LoggerAdapter.process` to do what you need. Here is a simple example:: class CustomAdapter(logging.LoggerAdapter): """ @@ -569,25 +572,23 @@ *multiple processes* is *not* supported, because there is no standard way to serialize access to a single file across multiple processes in Python. 
If you need to log to a single file from multiple processes, one way of doing this is -to have all the processes log to a :class:`SocketHandler`, and have a separate -process which implements a socket server which reads from the socket and logs -to file. (If you prefer, you can dedicate one thread in one of the existing -processes to perform this function.) :ref:`This section ` -documents this approach in more detail and includes a working socket receiver -which can be used as a starting point for you to adapt in your own -applications. +to have all the processes log to a :class:`~handlers.SocketHandler`, and have a +separate process which implements a socket server which reads from the socket +and logs to file. (If you prefer, you can dedicate one thread in one of the +existing processes to perform this function.) +:ref:`This section ` documents this approach in more detail and +includes a working socket receiver which can be used as a starting point for you +to adapt in your own applications. If you are using a recent version of Python which includes the :mod:`multiprocessing` module, you could write your own handler which uses the -:class:`Lock` class from this module to serialize access to the file from -your processes. The existing :class:`FileHandler` and subclasses do not make -use of :mod:`multiprocessing` at present, though they may do so in the future. -Note that at present, the :mod:`multiprocessing` module does not provide +:class:`~multiprocessing.Lock` class from this module to serialize access to the +file from your processes. The existing :class:`FileHandler` and subclasses do +not make use of :mod:`multiprocessing` at present, though they may do so in the +future. Note that at present, the :mod:`multiprocessing` module does not provide working lock functionality on all platforms (see http://bugs.python.org/issue3770). -.. currentmodule:: logging.handlers - Using file rotation ------------------- @@ -599,7 +600,7 @@ file and log to that. You may want to keep a certain number of these files, and when that many files have been created, rotate the files so that the number of files and the size of the files both remain bounded. For this usage pattern, the -logging package provides a :class:`RotatingFileHandler`:: +logging package provides a :class:`~handlers.RotatingFileHandler`:: import glob import logging @@ -650,7 +651,7 @@ Below is an example of a logging configuration dictionary - it's taken from the `documentation on the Django project `_. -This dictionary is passed to :func:`~logging.config.dictConfig` to put the configuration into effect:: +This dictionary is passed to :func:`~config.dictConfig` to put the configuration into effect:: LOGGING = { 'version': 1, diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst --- a/Doc/howto/logging.rst +++ b/Doc/howto/logging.rst @@ -469,12 +469,13 @@ :class:`~logging.Handler` objects are responsible for dispatching the appropriate log messages (based on the log messages' severity) to the handler's -specified destination. Logger objects can add zero or more handler objects to -themselves with an :func:`addHandler` method. As an example scenario, an -application may want to send all log messages to a log file, all log messages -of error or higher to stdout, and all messages of critical to an email address. -This scenario requires three individual handlers where each handler is -responsible for sending messages of a specific severity to a specific location. +specified destination. 
:class:`Logger` objects can add zero or more handler +objects to themselves with an :meth:`~Logger.addHandler` method. As an example +scenario, an application may want to send all log messages to a log file, all +log messages of error or higher to stdout, and all messages of critical to an +email address. This scenario requires three individual handlers where each +handler is responsible for sending messages of a specific severity to a specific +location. The standard library includes quite a few handler types (see :ref:`useful-handlers`); the tutorials use mainly :class:`StreamHandler` and @@ -485,16 +486,17 @@ developers who are using the built-in handler objects (that is, not creating custom handlers) are the following configuration methods: -* The :meth:`Handler.setLevel` method, just as in logger objects, specifies the +* The :meth:`~Handler.setLevel` method, just as in logger objects, specifies the lowest severity that will be dispatched to the appropriate destination. Why are there two :func:`setLevel` methods? The level set in the logger determines which severity of messages it will pass to its handlers. The level set in each handler determines which messages that handler will send on. -* :func:`setFormatter` selects a Formatter object for this handler to use. +* :meth:`~Handler.setFormatter` selects a Formatter object for this handler to + use. -* :func:`addFilter` and :func:`removeFilter` respectively configure and - deconfigure filter objects on handlers. +* :meth:`~Handler.addFilter` and :meth:`~Handler.removeFilter` respectively + configure and deconfigure filter objects on handlers. Application code should not directly instantiate and use instances of :class:`Handler`. Instead, the :class:`Handler` class is a base class that @@ -918,16 +920,16 @@ use with the % operator and a dictionary. For formatting multiple messages in a batch, instances of -:class:`BufferingFormatter` can be used. In addition to the format string (which -is applied to each message in the batch), there is provision for header and -trailer format strings. +:class:`~handlers.BufferingFormatter` can be used. In addition to the format +string (which is applied to each message in the batch), there is provision for +header and trailer format strings. When filtering based on logger level and/or handler level is not enough, instances of :class:`Filter` can be added to both :class:`Logger` and -:class:`Handler` instances (through their :meth:`addFilter` method). Before -deciding to process a message further, both loggers and handlers consult all -their filters for permission. If any filter returns a false value, the message -is not processed further. +:class:`Handler` instances (through their :meth:`~Handler.addFilter` method). +Before deciding to process a message further, both loggers and handlers consult +all their filters for permission. If any filter returns a false value, the +message is not processed further. The basic :class:`Filter` functionality allows filtering by specific logger name. If this feature is used, messages sent to the named logger and its @@ -945,19 +947,20 @@ cause the application using logging to terminate prematurely. :class:`SystemExit` and :class:`KeyboardInterrupt` exceptions are never -swallowed. Other exceptions which occur during the :meth:`emit` method of a -:class:`Handler` subclass are passed to its :meth:`handleError` method. +swallowed. 
Other exceptions which occur during the :meth:`~Handler.emit` method +of a :class:`Handler` subclass are passed to its :meth:`~Handler.handleError` +method. -The default implementation of :meth:`handleError` in :class:`Handler` checks -to see if a module-level variable, :data:`raiseExceptions`, is set. If set, a -traceback is printed to :data:`sys.stderr`. If not set, the exception is swallowed. +The default implementation of :meth:`~Handler.handleError` in :class:`Handler` +checks to see if a module-level variable, :data:`raiseExceptions`, is set. If +set, a traceback is printed to :data:`sys.stderr`. If not set, the exception is +swallowed. .. note:: The default value of :data:`raiseExceptions` is ``True``. This is because during development, you typically want to be notified of any exceptions that occur. It's advised that you set :data:`raiseExceptions` to ``False`` for production usage. -.. currentmodule:: logging .. _arbitrary-object-messages: @@ -967,11 +970,11 @@ In the preceding sections and examples, it has been assumed that the message passed when logging the event is a string. However, this is not the only possibility. You can pass an arbitrary object as a message, and its -:meth:`__str__` method will be called when the logging system needs to convert -it to a string representation. In fact, if you want to, you can avoid +:meth:`~object.__str__` method will be called when the logging system needs to +convert it to a string representation. In fact, if you want to, you can avoid computing a string representation altogether - for example, the -:class:`SocketHandler` emits an event by pickling it and sending it over the -wire. +:class:`~handlers.SocketHandler` emits an event by pickling it and sending it +over the wire. Optimization @@ -980,9 +983,10 @@ Formatting of message arguments is deferred until it cannot be avoided. However, computing the arguments passed to the logging method can also be expensive, and you may want to avoid doing it if the logger will just throw -away your event. To decide what to do, you can call the :meth:`isEnabledFor` -method which takes a level argument and returns true if the event would be -created by the Logger for that level of call. You can write code like this:: +away your event. To decide what to do, you can call the +:meth:`~Logger.isEnabledFor` method which takes a level argument and returns +true if the event would be created by the Logger for that level of call. +You can write code like this:: if logger.isEnabledFor(logging.DEBUG): logger.debug('Message with %s, %s', expensive_func1(), diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst --- a/Doc/library/logging.config.rst +++ b/Doc/library/logging.config.rst @@ -104,8 +104,9 @@ configurations. If no port is specified, the module's default :const:`DEFAULT_LOGGING_CONFIG_PORT` is used. Logging configurations will be sent as a file suitable for processing by :func:`fileConfig`. Returns a - :class:`Thread` instance on which you can call :meth:`start` to start the - server, and which you can :meth:`join` when appropriate. To stop the server, + :class:`~threading.Thread` instance on which you can call + :meth:`~threading.Thread.start` to start the server, and which you can + :meth:`~threading.Thread.join` when appropriate. To stop the server, call :func:`stopListening`. 
To send a configuration to the socket, read in the configuration file and @@ -169,11 +170,11 @@ * *formatters* - the corresponding value will be a dict in which each key is a formatter id and each value is a dict describing how to - configure the corresponding Formatter instance. + configure the corresponding :class:`~logging.Formatter` instance. The configuring dict is searched for keys ``format`` and ``datefmt`` (with defaults of ``None``) and these are used to construct a - :class:`logging.Formatter` instance. + :class:`~logging.Formatter` instance. * *filters* - the corresponding value will be a dict in which each key is a filter id and each value is a dict describing how to configure @@ -711,8 +712,9 @@ The ``class`` entry is optional. It indicates the name of the formatter's class (as a dotted module and class name.) This option is useful for instantiating a -:class:`Formatter` subclass. Subclasses of :class:`Formatter` can present -exception tracebacks in an expanded or condensed format. +:class:`~logging.Formatter` subclass. Subclasses of +:class:`~logging.Formatter` can present exception tracebacks in an expanded or +condensed format. .. note:: Due to the use of :func:`eval` as described above, there are potential security risks which result from using the :func:`listen` to send diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -53,8 +53,8 @@ .. method:: flush() Flushes the stream by calling its :meth:`flush` method. Note that the - :meth:`close` method is inherited from :class:`Handler` and so does - no output, so an explicit :meth:`flush` call may be needed at times. + :meth:`close` method is inherited from :class:`~logging.Handler` and so + does no output, so an explicit :meth:`flush` call may be needed at times. .. _file-handler: @@ -142,8 +142,8 @@ This handler is not appropriate for use under Windows, because under Windows open log files cannot be moved or renamed - logging opens the files with exclusive locks - and so there is no need for such a handler. Furthermore, -*ST_INO* is not supported under Windows; :func:`stat` always returns zero for -this value. +*ST_INO* is not supported under Windows; :func:`~os.stat` always returns zero +for this value. .. class:: WatchedFileHandler(filename[,mode[, encoding[, delay]]]) @@ -305,7 +305,8 @@ binary format. If there is an error with the socket, silently drops the packet. If the connection was previously lost, re-establishes the connection. To unpickle the record at the receiving end into a - :class:`LogRecord`, use the :func:`makeLogRecord` function. + :class:`~logging.LogRecord`, use the :func:`~logging.makeLogRecord` + function. .. method:: handleError() @@ -383,7 +384,8 @@ Pickles the record's attribute dictionary and writes it to the socket in binary format. If there is an error with the socket, silently drops the packet. To unpickle the record at the receiving end into a - :class:`LogRecord`, use the :func:`makeLogRecord` function. + :class:`~logging.LogRecord`, use the :func:`~logging.makeLogRecord` + function. .. method:: makeSocket() diff --git a/Lib/Cookie.py b/Lib/Cookie.py --- a/Lib/Cookie.py +++ b/Lib/Cookie.py @@ -238,7 +238,7 @@ # a two-way quoting algorithm. Any non-text character is translated # into a 4 character sequence: a forward-slash followed by the # three-digit octal equivalent of the character. Any '\' or '"' is -# quoted with a preceeding '\' slash. +# quoted with a preceding '\' slash. 
# # These are taken from RFC2068 and RFC2109. # _LegalChars is the list of chars which don't require "'s diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py --- a/Lib/_osx_support.py +++ b/Lib/_osx_support.py @@ -152,7 +152,7 @@ # are not installed. # # Futhermore, the compiler that can be used varies between - # Xcode releases. Upto Xcode 4 it was possible to use 'gcc-4.2' + # Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2' # as the compiler, after that 'clang' should be used because # gcc-4.2 is either not present, or a copy of 'llvm-gcc' that # miscompiles Python. @@ -192,7 +192,7 @@ if cc != oldcc: # Found a replacement compiler. - # Modify config vars using new compiler, if not already explictly + # Modify config vars using new compiler, if not already explicitly # overriden by an env variable, preserving additional arguments. for cv in _COMPILER_CONFIG_VARS: if cv in _config_vars and cv not in os.environ: @@ -274,7 +274,7 @@ # compile an extension using an SDK that is not present # on the current machine it is better to not use an SDK # than to fail. This is particularly important with - # the standalong Command Line Tools alternative to a + # the standalone Command Line Tools alternative to a # full-blown Xcode install since the CLT packages do not # provide SDKs. If the SDK is not present, it is assumed # that the header files and dev libs have been installed @@ -378,7 +378,7 @@ compilers are present, i.e. when installing pure Python dists. Customization of compiler paths and detection of unavailable archs is deferred - until the first extention module build is + until the first extension module build is requested (in distutils.sysconfig.customize_compiler). Currently called from distutils.sysconfig diff --git a/Lib/_strptime.py b/Lib/_strptime.py --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -222,7 +222,7 @@ """Convert a list to a regex string for matching a directive. Want possible matching values to be from longest to shortest. This - prevents the possibility of a match occuring for a value that also + prevents the possibility of a match occurring for a value that also a substring of a larger value that should have matched (e.g., 'abc' matching when 'abcdef' should have been the match). diff --git a/Lib/aifc.py b/Lib/aifc.py --- a/Lib/aifc.py +++ b/Lib/aifc.py @@ -123,7 +123,7 @@ compression type, and then write audio frames using writeframesraw. When all frames have been written, either call writeframes('') or close() to patch up the sizes in the header. -Marks can be added anytime. If there are any marks, ypu must call +Marks can be added anytime. If there are any marks, you must call close() after all frames have been written. The close() method is called automatically when the class instance is destroyed. diff --git a/Lib/calendar.py b/Lib/calendar.py --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -220,7 +220,7 @@ def yeardatescalendar(self, year, width=3): """ Return the data for the specified year ready for formatting. The return - value is a list of month rows. Each month row contains upto width months. + value is a list of month rows. Each month row contains up to width months. Each month contains between 4 and 6 weeks and each week contains 1-7 days. Days are datetime.date objects. """ diff --git a/Lib/compiler/pyassem.py b/Lib/compiler/pyassem.py --- a/Lib/compiler/pyassem.py +++ b/Lib/compiler/pyassem.py @@ -125,7 +125,7 @@ # Make sure every block appears in dominators, even if no # other block must precede it. 
dominators.setdefault(b, set()) - # preceeding blocks dominate following blocks + # preceding blocks dominate following blocks for c in b.get_followers(): while 1: dominators.setdefault(c, set()).add(b) diff --git a/Lib/difflib.py b/Lib/difflib.py --- a/Lib/difflib.py +++ b/Lib/difflib.py @@ -586,7 +586,7 @@ def get_grouped_opcodes(self, n=3): """ Isolate change clusters by eliminating ranges with no changes. - Return a generator of groups with upto n lines of context. + Return a generator of groups with up to n lines of context. Each group is in the same format as returned by get_opcodes(). >>> from pprint import pprint @@ -1361,7 +1361,7 @@ linejunk -- passed on to ndiff (see ndiff documentation) charjunk -- passed on to ndiff (see ndiff documentation) - This function returns an interator which returns a tuple: + This function returns an iterator which returns a tuple: (from line tuple, to line tuple, boolean flag) from/to line tuple -- (line num, line text) @@ -1963,7 +1963,7 @@ self._make_prefix() # change tabs to spaces before it gets more difficult after we insert - # markkup + # markup fromlines,tolines = self._tab_newline_replace(fromlines,tolines) # create diffs iterator which generates side by side from/to data diff --git a/Lib/doctest.py b/Lib/doctest.py --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -424,7 +424,7 @@ zero-based, with respect to the beginning of the DocTest. - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that preceed the + I.e., the number of space characters that precede the example's first prompt. - options: A dictionary mapping from option flags to True or @@ -895,7 +895,7 @@ if '__name__' not in globs: globs['__name__'] = '__main__' # provide a default module name - # Recursively expore `obj`, extracting DocTests. + # Recursively explore `obj`, extracting DocTests. tests = [] self._find(tests, obj, name, module, source_lines, globs, {}) # Sort the tests by alpha order of names, for consistency in diff --git a/Lib/genericpath.py b/Lib/genericpath.py --- a/Lib/genericpath.py +++ b/Lib/genericpath.py @@ -22,7 +22,7 @@ # This follows symbolic links, so both islink() and isdir() can be true -# for the same path ono systems that support symlinks +# for the same path on systems that support symlinks def isfile(path): """Test whether a path is a regular file""" try: diff --git a/Lib/inspect.py b/Lib/inspect.py --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -165,7 +165,7 @@ """Return true if the object is a generator. 
Generator objects provide these attributes: - __iter__ defined to support interation over container + __iter__ defined to support iteration over container close raises a new GeneratorExit exception inside the generator to terminate the iteration gi_code code object diff --git a/Lib/lib-tk/turtle.py b/Lib/lib-tk/turtle.py --- a/Lib/lib-tk/turtle.py +++ b/Lib/lib-tk/turtle.py @@ -1233,7 +1233,7 @@ self._delayvalue = int(delay) def _incrementudc(self): - """Increment upadate counter.""" + """Increment update counter.""" if not TurtleScreen._RUNNING: TurtleScreen._RUNNNING = True raise Terminator @@ -2439,7 +2439,7 @@ self.screen = TurtleScreen(canvas) RawTurtle.screens.append(self.screen) else: - raise TurtleGraphicsError("bad cavas argument %s" % canvas) + raise TurtleGraphicsError("bad canvas argument %s" % canvas) screen = self.screen TNavigator.__init__(self, screen.mode()) @@ -2684,7 +2684,7 @@ def shapesize(self, stretch_wid=None, stretch_len=None, outline=None): """Set/return turtle's stretchfactors/outline. Set resizemode to "user". - Optinonal arguments: + Optional arguments: stretch_wid : positive number stretch_len : positive number outline : positive number @@ -2975,7 +2975,7 @@ def _goto(self, end): """Move the pen to the point end, thereby drawing a line - if pen is down. All other methodes for turtle movement depend + if pen is down. All other methods for turtle movement depend on this one. """ ## Version mit undo-stuff diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py --- a/Lib/modulefinder.py +++ b/Lib/modulefinder.py @@ -516,7 +516,7 @@ # Print modules that may be missing, but then again, maybe not... if maybe: print - print "Submodules thay appear to be missing, but could also be", + print "Submodules that appear to be missing, but could also be", print "global names in the parent package:" for name in maybe: mods = self.badmodules[name].keys() diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py --- a/Lib/multiprocessing/forking.py +++ b/Lib/multiprocessing/forking.py @@ -367,7 +367,7 @@ def main(): ''' - Run code specifed by data received over pipe + Run code specified by data received over pipe ''' assert is_forking(sys.argv) diff --git a/Lib/optparse.py b/Lib/optparse.py --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -1471,7 +1471,7 @@ """_match_long_opt(opt : string) -> string Determine which long option string 'opt' matches, ie. which one - it is an unambiguous abbrevation for. Raises BadOptionError if + it is an unambiguous abbreviation for. Raises BadOptionError if 'opt' doesn't unambiguously match any long option string. """ return _match_abbrev(opt, self._long_opt) diff --git a/Lib/pdb.py b/Lib/pdb.py --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -1095,7 +1095,7 @@ def help_run(self): print """run [args...] Restart the debugged python program. If a string is supplied, it is -splitted with "shlex" and the result is used as the new sys.argv. +split with "shlex" and the result is used as the new sys.argv. History, breakpoints, actions and debugger options are preserved. "restart" is an alias for "run".""" diff --git a/Lib/pickletools.py b/Lib/pickletools.py --- a/Lib/pickletools.py +++ b/Lib/pickletools.py @@ -804,7 +804,7 @@ obtype=StackObject, doc="""An object representing a contiguous slice of the stack. - This is used in conjuction with markobject, to represent all + This is used in conjunction with markobject, to represent all of the stack following the topmost markobject. 
For example, the POP_MARK opcode changes the stack from @@ -1929,7 +1929,7 @@ stack = [] # crude emulation of unpickler stack if memo is None: - memo = {} # crude emulation of unpicker memo + memo = {} # crude emulation of unpickler memo maxproto = -1 # max protocol number seen markstack = [] # bytecode positions of MARK opcodes indentchunk = ' ' * indentlevel diff --git a/Lib/platform.py b/Lib/platform.py --- a/Lib/platform.py +++ b/Lib/platform.py @@ -228,7 +228,7 @@ return 'OpenLinux',pkg[1],id if os.path.isdir('/usr/lib/setup'): - # Check for slackware verson tag file (thanks to Greg Andruk) + # Check for slackware version tag file (thanks to Greg Andruk) verfiles = os.listdir('/usr/lib/setup') for n in range(len(verfiles)-1, -1, -1): if verfiles[n][:14] != 'slack-version-': @@ -280,7 +280,7 @@ if m is not None: return tuple(m.groups()) - # Unkown format... take the first two words + # Unknown format... take the first two words l = string.split(string.strip(firstline)) if l: version = l[0] @@ -800,7 +800,7 @@ versioninfo, machine) with versioninfo being a tuple (version, dev_stage, non_release_version). - Entries which cannot be determined are set to the paramter values + Entries which cannot be determined are set to the parameter values which default to ''. All tuple entries are strings. """ diff --git a/Lib/poplib.py b/Lib/poplib.py --- a/Lib/poplib.py +++ b/Lib/poplib.py @@ -321,7 +321,7 @@ hostname - the hostname of the pop3 over ssl server port - port number - keyfile - PEM formatted file that countains your private key + keyfile - PEM formatted file that contains your private key certfile - PEM formatted certificate chain file See the methods of the parent class POP3 for more documentation. diff --git a/Lib/rlcompleter.py b/Lib/rlcompleter.py --- a/Lib/rlcompleter.py +++ b/Lib/rlcompleter.py @@ -116,7 +116,7 @@ """Compute matches when text contains a dot. Assuming the text is of the form NAME.NAME....[NAME], and is - evaluatable in self.namespace, it will be evaluated and its attributes + evaluable in self.namespace, it will be evaluated and its attributes (as revealed by dir()) are used as possible completions. (For class instances, class members are also considered.) diff --git a/Lib/subprocess.py b/Lib/subprocess.py --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -143,7 +143,7 @@ started to execute, will be re-raised in the parent. Additionally, the exception object will have one extra attribute called 'child_traceback', which is a string containing traceback information -from the childs point of view. +from the child's point of view. The most common exception raised is OSError. This occurs, for example, when trying to execute a non-existent file. Applications diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -330,7 +330,7 @@ """General exception for extract errors.""" pass class ReadError(TarError): - """Exception for unreadble tar archives.""" + """Exception for unreadable tar archives.""" pass class CompressionError(TarError): """Exception for unavailable compression methods.""" diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1015,6 +1015,7 @@ Victor Terrón Richard M. Tew Tobias Thelen +Févry Thibault Lowe Thiderman Nicolas M.
Thi?ry James Thomas -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:14:25 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 15:14:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogIzE4NzQxOiBmaXgg?= =?utf-8?q?more_typos=2E__Patch_by_F=C3=A9vry_Thibault=2E?= Message-ID: <3cHMLT4h4Nz7LjR@mail.python.org> http://hg.python.org/cpython/rev/5295ed192ffd changeset: 85227:5295ed192ffd branch: 2.7 user: Ezio Melotti date: Sat Aug 17 16:07:38 2013 +0300 summary: #18741: fix more typos. Patch by F?vry Thibault. files: Lib/ctypes/test/runtests.py | 2 +- Lib/ctypes/test/test_cfuncs.py | 2 +- Lib/ctypes/test/test_numbers.py | 2 +- Lib/ctypes/test/test_refcounts.py | 6 +++--- Lib/ctypes/test/test_structures.py | 2 +- Lib/distutils/command/sdist.py | 2 +- Lib/distutils/tests/test_build_clib.py | 2 +- Lib/idlelib/idle_test/mock_tk.py | 8 ++++---- Lib/idlelib/idle_test/test_formatparagraph.py | 2 +- Lib/lib2to3/fixes/fix_itertools.py | 4 ++-- Lib/lib2to3/fixes/fix_metaclass.py | 2 +- Lib/lib2to3/tests/test_parser.py | 2 +- Lib/sqlite3/test/regression.py | 6 ++++-- Lib/sqlite3/test/types.py | 2 +- Lib/test/test_support.py | 4 ++-- Lib/unittest/test/test_loader.py | 2 +- 16 files changed, 26 insertions(+), 24 deletions(-) diff --git a/Lib/ctypes/test/runtests.py b/Lib/ctypes/test/runtests.py --- a/Lib/ctypes/test/runtests.py +++ b/Lib/ctypes/test/runtests.py @@ -2,7 +2,7 @@ Run all tests found in this directory, and print a summary of the results. Command line flags: - -q quiet mode: don't prnt anything while the tests are running + -q quiet mode: don't print anything while the tests are running -r run tests repeatedly, look for refcount leaks -u Add resources to the lits of allowed resources. '*' allows all diff --git a/Lib/ctypes/test/test_cfuncs.py b/Lib/ctypes/test/test_cfuncs.py --- a/Lib/ctypes/test/test_cfuncs.py +++ b/Lib/ctypes/test/test_cfuncs.py @@ -188,7 +188,7 @@ self.assertEqual(self._dll.tv_i(-42), None) self.assertEqual(self.S(), -42) -# The following repeates the above tests with stdcall functions (where +# The following repeats the above tests with stdcall functions (where # they are available) try: WinDLL diff --git a/Lib/ctypes/test/test_numbers.py b/Lib/ctypes/test/test_numbers.py --- a/Lib/ctypes/test/test_numbers.py +++ b/Lib/ctypes/test/test_numbers.py @@ -212,7 +212,7 @@ def test_init(self): # c_int() can be initialized from Python's int, and c_int. 
- # Not from c_long or so, which seems strange, abd should + # Not from c_long or so, which seems strange, abc should # probably be changed: self.assertRaises(TypeError, c_int, c_long(42)) diff --git a/Lib/ctypes/test/test_refcounts.py b/Lib/ctypes/test/test_refcounts.py --- a/Lib/ctypes/test/test_refcounts.py +++ b/Lib/ctypes/test/test_refcounts.py @@ -41,7 +41,7 @@ # this is the standard refcount for func self.assertEqual(grc(func), 2) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: f = OtherCallback(func) self.assertTrue(grc(func) > 2) @@ -58,7 +58,7 @@ x = X() x.a = OtherCallback(func) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: self.assertTrue(grc(func) > 2) # and may release it again @@ -71,7 +71,7 @@ f = OtherCallback(func) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: self.assertTrue(grc(func) > 2) # create a cycle diff --git a/Lib/ctypes/test/test_structures.py b/Lib/ctypes/test/test_structures.py --- a/Lib/ctypes/test/test_structures.py +++ b/Lib/ctypes/test/test_structures.py @@ -108,7 +108,7 @@ def test_emtpy(self): # I had problems with these # - # Although these are patological cases: Empty Structures! + # Although these are pathological cases: Empty Structures! class X(Structure): _fields_ = [] diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -183,7 +183,7 @@ depends on the user's options. """ # new behavior when using a template: - # the file list is recalculated everytime because + # the file list is recalculated every time because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. diff --git a/Lib/distutils/tests/test_build_clib.py b/Lib/distutils/tests/test_build_clib.py --- a/Lib/distutils/tests/test_build_clib.py +++ b/Lib/distutils/tests/test_build_clib.py @@ -77,7 +77,7 @@ cmd.compiler = FakeCompiler() - # build_libraries is also doing a bit of typoe checking + # build_libraries is also doing a bit of typo checking lib = [('name', {'sources': 'notvalid'})] self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib) diff --git a/Lib/idlelib/idle_test/mock_tk.py b/Lib/idlelib/idle_test/mock_tk.py --- a/Lib/idlelib/idle_test/mock_tk.py +++ b/Lib/idlelib/idle_test/mock_tk.py @@ -35,7 +35,7 @@ """Mock for tkinter.messagebox with an Mbox_func for each function. This module was 'tkMessageBox' in 2.x; hence the 'import as' in 3.x. - Example usage in test_module.py for testing functios in module.py: + Example usage in test_module.py for testing functions in module.py: --- from idlelib.idle_test.mock_tk import Mbox import module @@ -98,7 +98,7 @@ This implements .index without converting the result back to a string. The result is contrained by the number of lines and linelengths of - self.data. For many indexes, the result is initally (1, 0). + self.data. For many indexes, the result is initially (1, 0). 
The input index may have any of several possible forms: * line.char float: converted to 'line.char' string; @@ -149,7 +149,7 @@ -1: position before terminal \n; for .insert(), .delete 0: position after terminal \n; for .get, .delete index 1 - 1: same viewed as begininning of non-existent next line (for .index) + 1: same viewed as beginning of non-existent next line (for .index) ''' n = len(self.data) if endflag == 1: @@ -271,7 +271,7 @@ "Scroll screen to make the character at INDEX is visible." pass - # The following is a Misc method inheritet by Text. + # The following is a Misc method inherited by Text. # It should properly go in a Misc mock, but is included here for now. def bind(sequence=None, func=None, add=None): diff --git a/Lib/idlelib/idle_test/test_formatparagraph.py b/Lib/idlelib/idle_test/test_formatparagraph.py --- a/Lib/idlelib/idle_test/test_formatparagraph.py +++ b/Lib/idlelib/idle_test/test_formatparagraph.py @@ -244,7 +244,7 @@ """Test the formatting of text inside a Text widget. This is done with FormatParagraph.format.paragraph_event, - which calls funtions in the module as appropriate. + which calls functions in the module as appropriate. """ test_string = ( " '''this is a test of a reformat for a triple " diff --git a/Lib/lib2to3/fixes/fix_itertools.py b/Lib/lib2to3/fixes/fix_itertools.py --- a/Lib/lib2to3/fixes/fix_itertools.py +++ b/Lib/lib2to3/fixes/fix_itertools.py @@ -34,8 +34,8 @@ # Remove the 'itertools' prefix = it.prefix it.remove() - # Replace the node wich contains ('.', 'function') with the - # function (to be consistant with the second part of the pattern) + # Replace the node which contains ('.', 'function') with the + # function (to be consistent with the second part of the pattern) dot.remove() func.parent.replace(func) diff --git a/Lib/lib2to3/fixes/fix_metaclass.py b/Lib/lib2to3/fixes/fix_metaclass.py --- a/Lib/lib2to3/fixes/fix_metaclass.py +++ b/Lib/lib2to3/fixes/fix_metaclass.py @@ -71,7 +71,7 @@ def fixup_simple_stmt(parent, i, stmt_node): """ if there is a semi-colon all the parts count as part of the same simple_stmt. We just want the __metaclass__ part so we move - everything efter the semi-colon into its own simple_stmt node + everything after the semi-colon into its own simple_stmt node """ for semi_ind, node in enumerate(stmt_node.children): if node.type == token.SEMI: # *sigh* diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py --- a/Lib/lib2to3/tests/test_parser.py +++ b/Lib/lib2to3/tests/test_parser.py @@ -73,7 +73,7 @@ self.invalid_syntax("raise E from") -# Adapated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef +# Adaptated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef class TestFunctionAnnotations(GrammarTest): def test_1(self): self.validate("""def f(x) -> list: pass""") diff --git a/Lib/sqlite3/test/regression.py b/Lib/sqlite3/test/regression.py --- a/Lib/sqlite3/test/regression.py +++ b/Lib/sqlite3/test/regression.py @@ -159,7 +159,8 @@ def CheckCursorConstructorCallCheck(self): """ - Verifies that cursor methods check wether base class __init__ was called. + Verifies that cursor methods check whether base class __init__ was + called. """ class Cursor(sqlite.Cursor): def __init__(self, con): @@ -177,7 +178,8 @@ def CheckConnectionConstructorCallCheck(self): """ - Verifies that connection methods check wether base class __init__ was called. + Verifies that connection methods check whether base class __init__ was + called. 
""" class Connection(sqlite.Connection): def __init__(self, name): diff --git a/Lib/sqlite3/test/types.py b/Lib/sqlite3/test/types.py --- a/Lib/sqlite3/test/types.py +++ b/Lib/sqlite3/test/types.py @@ -244,7 +244,7 @@ self.assertEqual(type(value), float) def CheckNumber2(self): - """Checks wether converter names are cut off at '(' characters""" + """Checks whether converter names are cut off at '(' characters""" self.cur.execute("insert into test(n2) values (5)") value = self.cur.execute("select n2 from test").fetchone()[0] # if the converter is not used, it's an int instead of a float diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -184,7 +184,7 @@ if sys.platform.startswith("win"): def _waitfor(func, pathname, waitall=False): - # Peform the operation + # Perform the operation func(pathname) # Now setup the wait loop if waitall: @@ -200,7 +200,7 @@ # required when contention occurs. timeout = 0.001 while timeout < 1.0: - # Note we are only testing for the existance of the file(s) in + # Note we are only testing for the existence of the file(s) in # the contents of the directory regardless of any security or # access rights. If we have made it this far, we have sufficient # permissions to do that much using Python's equivalent of the diff --git a/Lib/unittest/test/test_loader.py b/Lib/unittest/test/test_loader.py --- a/Lib/unittest/test/test_loader.py +++ b/Lib/unittest/test/test_loader.py @@ -324,7 +324,7 @@ # Does loadTestsFromName raise TypeError when the `module` argument # isn't a module object? # - # XXX Accepts the not-a-module object, ignorning the object's type + # XXX Accepts the not-a-module object, ignoring the object's type # This should raise an exception or the method name should be changed # # XXX Some people are relying on this, so keep it for now -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:14:27 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 15:14:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NzQxOiBmaXgg?= =?utf-8?q?more_typos=2E__Patch_by_F=C3=A9vry_Thibault=2E?= Message-ID: <3cHMLW1t9vz7Lk4@mail.python.org> http://hg.python.org/cpython/rev/9e4685d703d4 changeset: 85228:9e4685d703d4 branch: 3.3 parent: 85223:f09ca52747a6 user: Ezio Melotti date: Sat Aug 17 16:11:40 2013 +0300 summary: #18741: fix more typos. Patch by F?vry Thibault. 
files: Lib/concurrent/futures/process.py | 4 +- Lib/ctypes/test/runtests.py | 2 +- Lib/ctypes/test/test_cfuncs.py | 2 +- Lib/ctypes/test/test_numbers.py | 2 +- Lib/ctypes/test/test_refcounts.py | 6 ++-- Lib/ctypes/test/test_structures.py | 2 +- Lib/distutils/command/install.py | 2 +- Lib/distutils/command/sdist.py | 2 +- Lib/distutils/tests/test_build_clib.py | 2 +- Lib/idlelib/idle_test/mock_tk.py | 8 +++--- Lib/idlelib/idle_test/test_formatparagraph.py | 2 +- Lib/lib2to3/fixes/fix_itertools.py | 4 +- Lib/lib2to3/fixes/fix_metaclass.py | 2 +- Lib/lib2to3/tests/test_parser.py | 2 +- Lib/sqlite3/test/regression.py | 6 +++- Lib/sqlite3/test/types.py | 2 +- Lib/test/support/__init__.py | 4 +- Lib/test/test_email/test_headerregistry.py | 12 +++++----- Lib/unittest/test/test_case.py | 2 +- Lib/unittest/test/test_loader.py | 2 +- Lib/unittest/test/test_program.py | 2 +- Lib/xml/etree/ElementTree.py | 2 +- 22 files changed, 38 insertions(+), 36 deletions(-) diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py --- a/Lib/concurrent/futures/process.py +++ b/Lib/concurrent/futures/process.py @@ -297,7 +297,7 @@ # sysconf not available or setting not available return if nsems_max == -1: - # indetermine limit, assume that limit is determined + # indetermined limit, assume that limit is determined # by available memory only return if nsems_max >= 256: @@ -411,7 +411,7 @@ self._result_queue.put(None) if wait: self._queue_management_thread.join() - # To reduce the risk of openning too many files, remove references to + # To reduce the risk of opening too many files, remove references to # objects that use file descriptors. self._queue_management_thread = None self._call_queue = None diff --git a/Lib/ctypes/test/runtests.py b/Lib/ctypes/test/runtests.py --- a/Lib/ctypes/test/runtests.py +++ b/Lib/ctypes/test/runtests.py @@ -2,7 +2,7 @@ Run all tests found in this directory, and print a summary of the results. Command line flags: - -q quiet mode: don't prnt anything while the tests are running + -q quiet mode: don't print anything while the tests are running -r run tests repeatedly, look for refcount leaks -u Add resources to the lits of allowed resources. '*' allows all diff --git a/Lib/ctypes/test/test_cfuncs.py b/Lib/ctypes/test/test_cfuncs.py --- a/Lib/ctypes/test/test_cfuncs.py +++ b/Lib/ctypes/test/test_cfuncs.py @@ -188,7 +188,7 @@ self.assertEqual(self._dll.tv_i(-42), None) self.assertEqual(self.S(), -42) -# The following repeates the above tests with stdcall functions (where +# The following repeats the above tests with stdcall functions (where # they are available) try: WinDLL diff --git a/Lib/ctypes/test/test_numbers.py b/Lib/ctypes/test/test_numbers.py --- a/Lib/ctypes/test/test_numbers.py +++ b/Lib/ctypes/test/test_numbers.py @@ -213,7 +213,7 @@ def test_init(self): # c_int() can be initialized from Python's int, and c_int. 
- # Not from c_long or so, which seems strange, abd should + # Not from c_long or so, which seems strange, abc should # probably be changed: self.assertRaises(TypeError, c_int, c_long(42)) diff --git a/Lib/ctypes/test/test_refcounts.py b/Lib/ctypes/test/test_refcounts.py --- a/Lib/ctypes/test/test_refcounts.py +++ b/Lib/ctypes/test/test_refcounts.py @@ -44,7 +44,7 @@ # this is the standard refcount for func self.assertEqual(grc(func), 2) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: f = OtherCallback(func) self.assertTrue(grc(func) > 2) @@ -61,7 +61,7 @@ x = X() x.a = OtherCallback(func) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: self.assertTrue(grc(func) > 2) # and may release it again @@ -74,7 +74,7 @@ f = OtherCallback(func) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: self.assertTrue(grc(func) > 2) # create a cycle diff --git a/Lib/ctypes/test/test_structures.py b/Lib/ctypes/test/test_structures.py --- a/Lib/ctypes/test/test_structures.py +++ b/Lib/ctypes/test/test_structures.py @@ -108,7 +108,7 @@ def test_emtpy(self): # I had problems with these # - # Although these are patological cases: Empty Structures! + # Although these are pathological cases: Empty Structures! class X(Structure): _fields_ = [] diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py --- a/Lib/distutils/command/install.py +++ b/Lib/distutils/command/install.py @@ -545,7 +545,7 @@ self.extra_dirs = extra_dirs def change_roots(self, *names): - """Change the install direcories pointed by name using root.""" + """Change the install directories pointed by name using root.""" for name in names: attr = "install_" + name setattr(self, attr, change_root(self.root, getattr(self, attr))) diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -175,7 +175,7 @@ depends on the user's options. """ # new behavior when using a template: - # the file list is recalculated everytime because + # the file list is recalculated every time because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. diff --git a/Lib/distutils/tests/test_build_clib.py b/Lib/distutils/tests/test_build_clib.py --- a/Lib/distutils/tests/test_build_clib.py +++ b/Lib/distutils/tests/test_build_clib.py @@ -77,7 +77,7 @@ cmd.compiler = FakeCompiler() - # build_libraries is also doing a bit of typoe checking + # build_libraries is also doing a bit of typo checking lib = [('name', {'sources': 'notvalid'})] self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib) diff --git a/Lib/idlelib/idle_test/mock_tk.py b/Lib/idlelib/idle_test/mock_tk.py --- a/Lib/idlelib/idle_test/mock_tk.py +++ b/Lib/idlelib/idle_test/mock_tk.py @@ -35,7 +35,7 @@ """Mock for tkinter.messagebox with an Mbox_func for each function. This module was 'tkMessageBox' in 2.x; hence the 'import as' in 3.x. - Example usage in test_module.py for testing functios in module.py: + Example usage in test_module.py for testing functions in module.py: --- from idlelib.idle_test.mock_tk import Mbox import module @@ -98,7 +98,7 @@ This implements .index without converting the result back to a string. 
The result is contrained by the number of lines and linelengths of - self.data. For many indexes, the result is initally (1, 0). + self.data. For many indexes, the result is initially (1, 0). The input index may have any of several possible forms: * line.char float: converted to 'line.char' string; @@ -149,7 +149,7 @@ -1: position before terminal \n; for .insert(), .delete 0: position after terminal \n; for .get, .delete index 1 - 1: same viewed as begininning of non-existent next line (for .index) + 1: same viewed as beginning of non-existent next line (for .index) ''' n = len(self.data) if endflag == 1: @@ -271,7 +271,7 @@ "Scroll screen to make the character at INDEX is visible." pass - # The following is a Misc method inheritet by Text. + # The following is a Misc method inherited by Text. # It should properly go in a Misc mock, but is included here for now. def bind(sequence=None, func=None, add=None): diff --git a/Lib/idlelib/idle_test/test_formatparagraph.py b/Lib/idlelib/idle_test/test_formatparagraph.py --- a/Lib/idlelib/idle_test/test_formatparagraph.py +++ b/Lib/idlelib/idle_test/test_formatparagraph.py @@ -244,7 +244,7 @@ """Test the formatting of text inside a Text widget. This is done with FormatParagraph.format.paragraph_event, - which calls funtions in the module as appropriate. + which calls functions in the module as appropriate. """ test_string = ( " '''this is a test of a reformat for a triple " diff --git a/Lib/lib2to3/fixes/fix_itertools.py b/Lib/lib2to3/fixes/fix_itertools.py --- a/Lib/lib2to3/fixes/fix_itertools.py +++ b/Lib/lib2to3/fixes/fix_itertools.py @@ -34,8 +34,8 @@ # Remove the 'itertools' prefix = it.prefix it.remove() - # Replace the node wich contains ('.', 'function') with the - # function (to be consistant with the second part of the pattern) + # Replace the node which contains ('.', 'function') with the + # function (to be consistent with the second part of the pattern) dot.remove() func.parent.replace(func) diff --git a/Lib/lib2to3/fixes/fix_metaclass.py b/Lib/lib2to3/fixes/fix_metaclass.py --- a/Lib/lib2to3/fixes/fix_metaclass.py +++ b/Lib/lib2to3/fixes/fix_metaclass.py @@ -71,7 +71,7 @@ def fixup_simple_stmt(parent, i, stmt_node): """ if there is a semi-colon all the parts count as part of the same simple_stmt. We just want the __metaclass__ part so we move - everything efter the semi-colon into its own simple_stmt node + everything after the semi-colon into its own simple_stmt node """ for semi_ind, node in enumerate(stmt_node.children): if node.type == token.SEMI: # *sigh* diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py --- a/Lib/lib2to3/tests/test_parser.py +++ b/Lib/lib2to3/tests/test_parser.py @@ -77,7 +77,7 @@ self.invalid_syntax("raise E from") -# Adapated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef +# Adaptated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef class TestFunctionAnnotations(GrammarTest): def test_1(self): self.validate("""def f(x) -> list: pass""") diff --git a/Lib/sqlite3/test/regression.py b/Lib/sqlite3/test/regression.py --- a/Lib/sqlite3/test/regression.py +++ b/Lib/sqlite3/test/regression.py @@ -161,7 +161,8 @@ def CheckCursorConstructorCallCheck(self): """ - Verifies that cursor methods check wether base class __init__ was called. + Verifies that cursor methods check whether base class __init__ was + called. 
""" class Cursor(sqlite.Cursor): def __init__(self, con): @@ -187,7 +188,8 @@ def CheckConnectionConstructorCallCheck(self): """ - Verifies that connection methods check wether base class __init__ was called. + Verifies that connection methods check whether base class __init__ was + called. """ class Connection(sqlite.Connection): def __init__(self, name): diff --git a/Lib/sqlite3/test/types.py b/Lib/sqlite3/test/types.py --- a/Lib/sqlite3/test/types.py +++ b/Lib/sqlite3/test/types.py @@ -229,7 +229,7 @@ self.assertEqual(type(value), float) def CheckNumber2(self): - """Checks wether converter names are cut off at '(' characters""" + """Checks whether converter names are cut off at '(' characters""" self.cur.execute("insert into test(n2) values (5)") value = self.cur.execute("select n2 from test").fetchone()[0] # if the converter is not used, it's an int instead of a float diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -246,7 +246,7 @@ if sys.platform.startswith("win"): def _waitfor(func, pathname, waitall=False): - # Peform the operation + # Perform the operation func(pathname) # Now setup the wait loop if waitall: @@ -262,7 +262,7 @@ # required when contention occurs. timeout = 0.001 while timeout < 1.0: - # Note we are only testing for the existance of the file(s) in + # Note we are only testing for the existence of the file(s) in # the contents of the directory regardless of any security or # access rights. If we have made it this far, we have sufficient # permissions to do that much using Python's equivalent of the diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -531,7 +531,7 @@ '\tname*1*=%2A%2A%2Afun%2A%2A%2A%20;\tname*2="is it not.pdf"\n'), ), - # Make sure we also handle it if there are spurrious double qoutes. + # Make sure we also handle it if there are spurious double quotes. 'rfc2231_encoded_with_double_quotes': ( ("text/plain;" '\tname*0*="us-ascii\'\'This%20is%20even%20more%20";' @@ -711,8 +711,8 @@ # in double quotes, making the value a valid non-encoded string. The # old parser decodes this just like the previous case, which may be the # better Postel rule, but could equally result in borking headers that - # intentially have quoted quotes in them. We could get this 98% right - # if we treat it as a quoted string *unless* it matches the + # intentionally have quoted quotes in them. We could get this 98% + # right if we treat it as a quoted string *unless* it matches the # charset'lang'value pattern exactly *and* there is at least one # encoded segment. Implementing that algorithm will require some # refactoring, so I haven't done it (yet). @@ -944,7 +944,7 @@ [errors.InvalidHeaderDefect]), # Unrecoverable invalid values. We *could* apply more heuristics to - # get someing out of the first two, but doing so is not worth the + # get something out of the first two, but doing so is not worth the # effort. 'non_comment_garbage_before': ( @@ -1541,13 +1541,13 @@ def test_fold_unstructured_with_commas(self): # The old wrapper would fold this at the commas. 
h = self.make_header('Subject', "This header is intended to " - "demonstrate, in a fairly susinct way, that we now do " + "demonstrate, in a fairly succinct way, that we now do " "not give a , special treatment in unstructured headers.") self.assertEqual( h.fold(policy=policy.default.clone(max_line_length=60)), textwrap.dedent("""\ Subject: This header is intended to demonstrate, in a fairly - susinct way, that we now do not give a , special treatment + succinct way, that we now do not give a , special treatment in unstructured headers. """)) diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py --- a/Lib/unittest/test/test_case.py +++ b/Lib/unittest/test/test_case.py @@ -1054,7 +1054,7 @@ self.assertWarns(DeprecationWarning, _runtime_warn) def testAssertWarnsContext(self): - # Believe it or not, it is preferrable to duplicate all tests above, + # Believe it or not, it is preferable to duplicate all tests above, # to make sure the __warningregistry__ $@ is circumvented correctly. def _runtime_warn(): warnings.warn("foo", RuntimeWarning) diff --git a/Lib/unittest/test/test_loader.py b/Lib/unittest/test/test_loader.py --- a/Lib/unittest/test/test_loader.py +++ b/Lib/unittest/test/test_loader.py @@ -324,7 +324,7 @@ # Does loadTestsFromName raise TypeError when the `module` argument # isn't a module object? # - # XXX Accepts the not-a-module object, ignorning the object's type + # XXX Accepts the not-a-module object, ignoring the object's type # This should raise an exception or the method name should be changed # # XXX Some people are relying on this, so keep it for now diff --git a/Lib/unittest/test/test_program.py b/Lib/unittest/test/test_program.py --- a/Lib/unittest/test/test_program.py +++ b/Lib/unittest/test/test_program.py @@ -330,7 +330,7 @@ self.assertEqual(program.testNames, argv[1:]) # it may be better to use platform specific functions to normalise paths - # rather than accepting '.PY' and '\' as file seprator on Linux / Mac + # rather than accepting '.PY' and '\' as file separator on Linux / Mac # it would also be better to check that a filename is a valid module # identifier (we have a regex for this in loader.py) # for invalid filenames should we raise a useful error rather than diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -836,7 +836,7 @@ @contextlib.contextmanager def _get_writer(file_or_filename, encoding): - # returns text write method and release all resourses after using + # returns text write method and release all resources after using try: write = file_or_filename.write except AttributeError: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:14:28 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 15:14:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NzQxOiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cHMLX6mtQz7Ljx@mail.python.org> http://hg.python.org/cpython/rev/b3236989f869 changeset: 85229:b3236989f869 parent: 85224:bea2f12e899e parent: 85228:9e4685d703d4 user: Ezio Melotti date: Sat Aug 17 16:13:22 2013 +0300 summary: #18741: merge with 3.3. 
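The ElementTree hunk above fixes a comment on _get_writer, the context manager that hands the serializer a text write() callable and releases whatever it had to open. A stripped-down sketch of that idea, assuming text-mode output only (the real helper also copes with byte streams, buffering and the special "unicode" encoding):

    import contextlib
    import io

    @contextlib.contextmanager
    def get_writer(file_or_filename, encoding):
        # If the argument already has a write method, use it and leave
        # closing to the caller; otherwise open the named file ourselves
        # and close it when the with-block ends.
        try:
            write = file_or_filename.write
        except AttributeError:
            with io.open(file_or_filename, "w", encoding=encoding) as f:
                yield f.write
        else:
            yield write

    # usage: with get_writer(target, "utf-8") as write: write("<root/>")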
files: Lib/concurrent/futures/process.py | 4 +- Lib/ctypes/test/runtests.py | 2 +- Lib/ctypes/test/test_cfuncs.py | 2 +- Lib/ctypes/test/test_numbers.py | 2 +- Lib/ctypes/test/test_refcounts.py | 6 ++-- Lib/ctypes/test/test_structures.py | 2 +- Lib/distutils/command/install.py | 2 +- Lib/distutils/command/sdist.py | 2 +- Lib/distutils/tests/test_build_clib.py | 2 +- Lib/idlelib/idle_test/mock_tk.py | 8 +++--- Lib/idlelib/idle_test/test_formatparagraph.py | 2 +- Lib/lib2to3/fixes/fix_itertools.py | 4 +- Lib/lib2to3/fixes/fix_metaclass.py | 2 +- Lib/lib2to3/tests/test_parser.py | 2 +- Lib/sqlite3/test/regression.py | 6 +++- Lib/sqlite3/test/types.py | 2 +- Lib/test/support/__init__.py | 4 +- Lib/test/test_email/test_headerregistry.py | 12 +++++----- Lib/unittest/test/test_case.py | 2 +- Lib/unittest/test/test_loader.py | 2 +- Lib/unittest/test/test_program.py | 2 +- Lib/xml/etree/ElementTree.py | 2 +- 22 files changed, 38 insertions(+), 36 deletions(-) diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py --- a/Lib/concurrent/futures/process.py +++ b/Lib/concurrent/futures/process.py @@ -301,7 +301,7 @@ # sysconf not available or setting not available return if nsems_max == -1: - # indetermine limit, assume that limit is determined + # indetermined limit, assume that limit is determined # by available memory only return if nsems_max >= 256: @@ -415,7 +415,7 @@ self._result_queue.put(None) if wait: self._queue_management_thread.join() - # To reduce the risk of openning too many files, remove references to + # To reduce the risk of opening too many files, remove references to # objects that use file descriptors. self._queue_management_thread = None self._call_queue = None diff --git a/Lib/ctypes/test/runtests.py b/Lib/ctypes/test/runtests.py --- a/Lib/ctypes/test/runtests.py +++ b/Lib/ctypes/test/runtests.py @@ -2,7 +2,7 @@ Run all tests found in this directory, and print a summary of the results. Command line flags: - -q quiet mode: don't prnt anything while the tests are running + -q quiet mode: don't print anything while the tests are running -r run tests repeatedly, look for refcount leaks -u Add resources to the lits of allowed resources. '*' allows all diff --git a/Lib/ctypes/test/test_cfuncs.py b/Lib/ctypes/test/test_cfuncs.py --- a/Lib/ctypes/test/test_cfuncs.py +++ b/Lib/ctypes/test/test_cfuncs.py @@ -188,7 +188,7 @@ self.assertEqual(self._dll.tv_i(-42), None) self.assertEqual(self.S(), -42) -# The following repeates the above tests with stdcall functions (where +# The following repeats the above tests with stdcall functions (where # they are available) try: WinDLL diff --git a/Lib/ctypes/test/test_numbers.py b/Lib/ctypes/test/test_numbers.py --- a/Lib/ctypes/test/test_numbers.py +++ b/Lib/ctypes/test/test_numbers.py @@ -213,7 +213,7 @@ def test_init(self): # c_int() can be initialized from Python's int, and c_int. 
- # Not from c_long or so, which seems strange, abd should + # Not from c_long or so, which seems strange, abc should # probably be changed: self.assertRaises(TypeError, c_int, c_long(42)) diff --git a/Lib/ctypes/test/test_refcounts.py b/Lib/ctypes/test/test_refcounts.py --- a/Lib/ctypes/test/test_refcounts.py +++ b/Lib/ctypes/test/test_refcounts.py @@ -44,7 +44,7 @@ # this is the standard refcount for func self.assertEqual(grc(func), 2) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: f = OtherCallback(func) self.assertTrue(grc(func) > 2) @@ -61,7 +61,7 @@ x = X() x.a = OtherCallback(func) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: self.assertTrue(grc(func) > 2) # and may release it again @@ -74,7 +74,7 @@ f = OtherCallback(func) - # the CFuncPtr instance holds atr least one refcount on func: + # the CFuncPtr instance holds at least one refcount on func: self.assertTrue(grc(func) > 2) # create a cycle diff --git a/Lib/ctypes/test/test_structures.py b/Lib/ctypes/test/test_structures.py --- a/Lib/ctypes/test/test_structures.py +++ b/Lib/ctypes/test/test_structures.py @@ -108,7 +108,7 @@ def test_emtpy(self): # I had problems with these # - # Although these are patological cases: Empty Structures! + # Although these are pathological cases: Empty Structures! class X(Structure): _fields_ = [] diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py --- a/Lib/distutils/command/install.py +++ b/Lib/distutils/command/install.py @@ -530,7 +530,7 @@ self.extra_dirs = extra_dirs def change_roots(self, *names): - """Change the install direcories pointed by name using root.""" + """Change the install directories pointed by name using root.""" for name in names: attr = "install_" + name setattr(self, attr, change_root(self.root, getattr(self, attr))) diff --git a/Lib/distutils/command/sdist.py b/Lib/distutils/command/sdist.py --- a/Lib/distutils/command/sdist.py +++ b/Lib/distutils/command/sdist.py @@ -175,7 +175,7 @@ depends on the user's options. """ # new behavior when using a template: - # the file list is recalculated everytime because + # the file list is recalculated every time because # even if MANIFEST.in or setup.py are not changed # the user might have added some files in the tree that # need to be included. diff --git a/Lib/distutils/tests/test_build_clib.py b/Lib/distutils/tests/test_build_clib.py --- a/Lib/distutils/tests/test_build_clib.py +++ b/Lib/distutils/tests/test_build_clib.py @@ -77,7 +77,7 @@ cmd.compiler = FakeCompiler() - # build_libraries is also doing a bit of typoe checking + # build_libraries is also doing a bit of typo checking lib = [('name', {'sources': 'notvalid'})] self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib) diff --git a/Lib/idlelib/idle_test/mock_tk.py b/Lib/idlelib/idle_test/mock_tk.py --- a/Lib/idlelib/idle_test/mock_tk.py +++ b/Lib/idlelib/idle_test/mock_tk.py @@ -35,7 +35,7 @@ """Mock for tkinter.messagebox with an Mbox_func for each function. This module was 'tkMessageBox' in 2.x; hence the 'import as' in 3.x. - Example usage in test_module.py for testing functios in module.py: + Example usage in test_module.py for testing functions in module.py: --- from idlelib.idle_test.mock_tk import Mbox import module @@ -98,7 +98,7 @@ This implements .index without converting the result back to a string. 
The result is contrained by the number of lines and linelengths of - self.data. For many indexes, the result is initally (1, 0). + self.data. For many indexes, the result is initially (1, 0). The input index may have any of several possible forms: * line.char float: converted to 'line.char' string; @@ -149,7 +149,7 @@ -1: position before terminal \n; for .insert(), .delete 0: position after terminal \n; for .get, .delete index 1 - 1: same viewed as begininning of non-existent next line (for .index) + 1: same viewed as beginning of non-existent next line (for .index) ''' n = len(self.data) if endflag == 1: @@ -271,7 +271,7 @@ "Scroll screen to make the character at INDEX is visible." pass - # The following is a Misc method inheritet by Text. + # The following is a Misc method inherited by Text. # It should properly go in a Misc mock, but is included here for now. def bind(sequence=None, func=None, add=None): diff --git a/Lib/idlelib/idle_test/test_formatparagraph.py b/Lib/idlelib/idle_test/test_formatparagraph.py --- a/Lib/idlelib/idle_test/test_formatparagraph.py +++ b/Lib/idlelib/idle_test/test_formatparagraph.py @@ -244,7 +244,7 @@ """Test the formatting of text inside a Text widget. This is done with FormatParagraph.format.paragraph_event, - which calls funtions in the module as appropriate. + which calls functions in the module as appropriate. """ test_string = ( " '''this is a test of a reformat for a triple " diff --git a/Lib/lib2to3/fixes/fix_itertools.py b/Lib/lib2to3/fixes/fix_itertools.py --- a/Lib/lib2to3/fixes/fix_itertools.py +++ b/Lib/lib2to3/fixes/fix_itertools.py @@ -34,8 +34,8 @@ # Remove the 'itertools' prefix = it.prefix it.remove() - # Replace the node wich contains ('.', 'function') with the - # function (to be consistant with the second part of the pattern) + # Replace the node which contains ('.', 'function') with the + # function (to be consistent with the second part of the pattern) dot.remove() func.parent.replace(func) diff --git a/Lib/lib2to3/fixes/fix_metaclass.py b/Lib/lib2to3/fixes/fix_metaclass.py --- a/Lib/lib2to3/fixes/fix_metaclass.py +++ b/Lib/lib2to3/fixes/fix_metaclass.py @@ -71,7 +71,7 @@ def fixup_simple_stmt(parent, i, stmt_node): """ if there is a semi-colon all the parts count as part of the same simple_stmt. We just want the __metaclass__ part so we move - everything efter the semi-colon into its own simple_stmt node + everything after the semi-colon into its own simple_stmt node """ for semi_ind, node in enumerate(stmt_node.children): if node.type == token.SEMI: # *sigh* diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py --- a/Lib/lib2to3/tests/test_parser.py +++ b/Lib/lib2to3/tests/test_parser.py @@ -77,7 +77,7 @@ self.invalid_syntax("raise E from") -# Adapated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef +# Adaptated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef class TestFunctionAnnotations(GrammarTest): def test_1(self): self.validate("""def f(x) -> list: pass""") diff --git a/Lib/sqlite3/test/regression.py b/Lib/sqlite3/test/regression.py --- a/Lib/sqlite3/test/regression.py +++ b/Lib/sqlite3/test/regression.py @@ -161,7 +161,8 @@ def CheckCursorConstructorCallCheck(self): """ - Verifies that cursor methods check wether base class __init__ was called. + Verifies that cursor methods check whether base class __init__ was + called. 
""" class Cursor(sqlite.Cursor): def __init__(self, con): @@ -187,7 +188,8 @@ def CheckConnectionConstructorCallCheck(self): """ - Verifies that connection methods check wether base class __init__ was called. + Verifies that connection methods check whether base class __init__ was + called. """ class Connection(sqlite.Connection): def __init__(self, name): diff --git a/Lib/sqlite3/test/types.py b/Lib/sqlite3/test/types.py --- a/Lib/sqlite3/test/types.py +++ b/Lib/sqlite3/test/types.py @@ -229,7 +229,7 @@ self.assertEqual(type(value), float) def CheckNumber2(self): - """Checks wether converter names are cut off at '(' characters""" + """Checks whether converter names are cut off at '(' characters""" self.cur.execute("insert into test(n2) values (5)") value = self.cur.execute("select n2 from test").fetchone()[0] # if the converter is not used, it's an int instead of a float diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -252,7 +252,7 @@ if sys.platform.startswith("win"): def _waitfor(func, pathname, waitall=False): - # Peform the operation + # Perform the operation func(pathname) # Now setup the wait loop if waitall: @@ -268,7 +268,7 @@ # required when contention occurs. timeout = 0.001 while timeout < 1.0: - # Note we are only testing for the existance of the file(s) in + # Note we are only testing for the existence of the file(s) in # the contents of the directory regardless of any security or # access rights. If we have made it this far, we have sufficient # permissions to do that much using Python's equivalent of the diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -531,7 +531,7 @@ '\tname*1*=%2A%2A%2Afun%2A%2A%2A%20;\tname*2="is it not.pdf"\n'), ), - # Make sure we also handle it if there are spurrious double qoutes. + # Make sure we also handle it if there are spurious double quotes. 'rfc2231_encoded_with_double_quotes': ( ("text/plain;" '\tname*0*="us-ascii\'\'This%20is%20even%20more%20";' @@ -711,8 +711,8 @@ # in double quotes, making the value a valid non-encoded string. The # old parser decodes this just like the previous case, which may be the # better Postel rule, but could equally result in borking headers that - # intentially have quoted quotes in them. We could get this 98% right - # if we treat it as a quoted string *unless* it matches the + # intentionally have quoted quotes in them. We could get this 98% + # right if we treat it as a quoted string *unless* it matches the # charset'lang'value pattern exactly *and* there is at least one # encoded segment. Implementing that algorithm will require some # refactoring, so I haven't done it (yet). @@ -944,7 +944,7 @@ [errors.InvalidHeaderDefect]), # Unrecoverable invalid values. We *could* apply more heuristics to - # get someing out of the first two, but doing so is not worth the + # get something out of the first two, but doing so is not worth the # effort. 'non_comment_garbage_before': ( @@ -1541,13 +1541,13 @@ def test_fold_unstructured_with_commas(self): # The old wrapper would fold this at the commas. 
h = self.make_header('Subject', "This header is intended to " - "demonstrate, in a fairly susinct way, that we now do " + "demonstrate, in a fairly succinct way, that we now do " "not give a , special treatment in unstructured headers.") self.assertEqual( h.fold(policy=policy.default.clone(max_line_length=60)), textwrap.dedent("""\ Subject: This header is intended to demonstrate, in a fairly - susinct way, that we now do not give a , special treatment + succinct way, that we now do not give a , special treatment in unstructured headers. """)) diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py --- a/Lib/unittest/test/test_case.py +++ b/Lib/unittest/test/test_case.py @@ -1146,7 +1146,7 @@ self.assertWarns(DeprecationWarning, _runtime_warn) def testAssertWarnsContext(self): - # Believe it or not, it is preferrable to duplicate all tests above, + # Believe it or not, it is preferable to duplicate all tests above, # to make sure the __warningregistry__ $@ is circumvented correctly. def _runtime_warn(): warnings.warn("foo", RuntimeWarning) diff --git a/Lib/unittest/test/test_loader.py b/Lib/unittest/test/test_loader.py --- a/Lib/unittest/test/test_loader.py +++ b/Lib/unittest/test/test_loader.py @@ -324,7 +324,7 @@ # Does loadTestsFromName raise TypeError when the `module` argument # isn't a module object? # - # XXX Accepts the not-a-module object, ignorning the object's type + # XXX Accepts the not-a-module object, ignoring the object's type # This should raise an exception or the method name should be changed # # XXX Some people are relying on this, so keep it for now diff --git a/Lib/unittest/test/test_program.py b/Lib/unittest/test/test_program.py --- a/Lib/unittest/test/test_program.py +++ b/Lib/unittest/test/test_program.py @@ -365,7 +365,7 @@ self.assertEqual(program.testNames, argv[1:]) # it may be better to use platform specific functions to normalise paths - # rather than accepting '.PY' and '\' as file seprator on Linux / Mac + # rather than accepting '.PY' and '\' as file separator on Linux / Mac # it would also be better to check that a filename is a valid module # identifier (we have a regex for this in loader.py) # for invalid filenames should we raise a useful error rather than diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -786,7 +786,7 @@ @contextlib.contextmanager def _get_writer(file_or_filename, encoding): - # returns text write method and release all resourses after using + # returns text write method and release all resources after using try: write = file_or_filename.write except AttributeError: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:58:32 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 15:58:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogIzE4NDY2OiBmaXgg?= =?utf-8?q?more_typos=2E__Patch_by_F=C3=A9vry_Thibault=2E?= Message-ID: <3cHNKN4TVfz7LjR@mail.python.org> http://hg.python.org/cpython/rev/c75b8d5fa016 changeset: 85230:c75b8d5fa016 branch: 2.7 parent: 85227:5295ed192ffd user: Ezio Melotti date: Sat Aug 17 16:56:09 2013 +0300 summary: #18466: fix more typos. Patch by F?vry Thibault. 
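The headerregistry test quoted above folds a long unstructured Subject under a 60-column policy and checks that commas get no special treatment. The same behaviour can be reproduced outside the test harness along these lines (a sketch; the registry call below stands in for the test class's own make_header helper):

    from email import policy
    from email.headerregistry import HeaderRegistry

    registry = HeaderRegistry()
    h = registry('Subject',
                 "This header is intended to demonstrate, in a fairly "
                 "succinct way, that we now do not give a , special "
                 "treatment in unstructured headers.")
    # Folds at whitespace to stay within 60 columns, not at the comma.
    print(h.fold(policy=policy.default.clone(max_line_length=60)))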
files: Include/datetime.h | 2 +- Lib/collections.py | 2 +- Lib/test/test_ast.py | 2 +- Lib/test/test_cookielib.py | 2 +- Lib/test/test_datetime.py | 2 +- Lib/test/test_normalization.py | 2 +- Lib/test/test_urllib.py | 2 +- Misc/NEWS | 4 ++-- Modules/_ssl.c | 2 +- Modules/unicodedata.c | 2 +- PC/_subprocess.c | 2 +- PCbuild/readme.txt | 2 +- 12 files changed, 13 insertions(+), 13 deletions(-) diff --git a/Include/datetime.h b/Include/datetime.h --- a/Include/datetime.h +++ b/Include/datetime.h @@ -42,7 +42,7 @@ typedef struct { - PyObject_HEAD /* a pure abstract base clase */ + PyObject_HEAD /* a pure abstract base class */ } PyDateTime_TZInfo; diff --git a/Lib/collections.py b/Lib/collections.py --- a/Lib/collections.py +++ b/Lib/collections.py @@ -369,7 +369,7 @@ result = namespace[typename] # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in enviroments where + # where the named tuple is created. Bypass this step in environments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython). try: diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -18,7 +18,7 @@ # These tests are compiled through "exec" -# There should be atleast one test per statement +# There should be at least one test per statement exec_tests = [ # None "None", diff --git a/Lib/test/test_cookielib.py b/Lib/test/test_cookielib.py --- a/Lib/test/test_cookielib.py +++ b/Lib/test/test_cookielib.py @@ -329,7 +329,7 @@ ## commas and equals are commonly appear in the cookie value). This also ## means that if you fold multiple Set-Cookie header fields into one, ## comma-separated list, it'll be a headache to parse (at least my head -## starts hurting everytime I think of that code). +## starts hurting every time I think of that code). ## - Expires: You'll get all sorts of date formats in the expires, ## including emtpy expires attributes ("expires="). Be as flexible as you ## can, and certainly don't expect the weekday to be there; if you can't diff --git a/Lib/test/test_datetime.py b/Lib/test/test_datetime.py --- a/Lib/test/test_datetime.py +++ b/Lib/test/test_datetime.py @@ -124,7 +124,7 @@ self.assertEqual(derived.tzname(None), 'cookie') ############################################################################# -# Base clase for testing a particular aspect of timedelta, time, date and +# Base class for testing a particular aspect of timedelta, time, date and # datetime comparisons. class HarmlessMixedComparison: diff --git a/Lib/test/test_normalization.py b/Lib/test/test_normalization.py --- a/Lib/test/test_normalization.py +++ b/Lib/test/test_normalization.py @@ -57,7 +57,7 @@ c1,c2,c3,c4,c5 = [unistr(x) for x in line.split(';')[:-1]] except RangeError: # Skip unsupported characters; - # try atleast adding c1 if we are in part1 + # try at least adding c1 if we are in part1 if part == "@Part1": try: c1 = unistr(line.split(';')[0]) diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -812,7 +812,7 @@ # Everywhere else they work ok, but on those machines, sometimes # fail in one of the tests, sometimes in other. I have a linux, and # the tests go ok. -# If anybody has one of the problematic enviroments, please help! +# If anybody has one of the problematic environments, please help! # . 
Facundo # # def server(evt): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -3443,7 +3443,7 @@ - Issue #1285086: Speed up ``urllib.quote()`` and urllib.unquote for simple cases. -- Issue #8688: Distutils now recalculates MANIFEST everytime. +- Issue #8688: Distutils now recalculates MANIFEST every time. - Issue #5099: The ``__del__()`` method of ``subprocess.Popen`` (and the methods it calls) referenced global objects, causing errors to pop up during @@ -6299,7 +6299,7 @@ - Issue #3547: Fixed ctypes structures bitfields of varying integer sizes. -- Issue #3879: A regression in urllib.getproxies_enviroment was fixed. +- Issue #3879: A regression in urllib.getproxies_environment was fixed. - Issue #3863: Disabled a unit test of fork being called from a thread when running on platforms known to exhibit OS bugs when attempting that. diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -1455,7 +1455,7 @@ * Otherwise OpenSSL might read in too much data, * eating clear text data that happens to be * transmitted after the SSL shutdown. - * Should be safe to call repeatedly everytime this + * Should be safe to call repeatedly every time this * function is used and the shutdown_seen_zero != 0 * condition is met. */ diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c --- a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -520,7 +520,7 @@ while(stackptr) { Py_UNICODE code = stack[--stackptr]; /* Hangul Decomposition adds three characters in - a single step, so we need atleast that much room. */ + a single step, so we need at least that much room. */ if (space < 3) { Py_ssize_t newsize = PyString_GET_SIZE(result) + 10; space += 10; diff --git a/PC/_subprocess.c b/PC/_subprocess.c --- a/PC/_subprocess.c +++ b/PC/_subprocess.c @@ -331,7 +331,7 @@ PyObject* values; char* p; - /* convert environment dictionary to windows enviroment string */ + /* convert environment dictionary to windows environment string */ if (! PyMapping_Check(environment)) { PyErr_SetString( PyExc_TypeError, "environment must be dictionary or None"); diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -262,7 +262,7 @@ In addition, you need the Visual Studio plugin for external C compilers, from http://sf.net/projects/vsextcomp. The plugin will wrap cl.exe, to locate the proper target compiler, and convert compiler options -accordingly. The project files require atleast version 0.9. +accordingly. The project files require at least version 0.9. Building for AMD64 ------------------ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:58:34 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 15:58:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NDY2OiBmaXgg?= =?utf-8?q?more_typos=2E__Patch_by_F=C3=A9vry_Thibault=2E?= Message-ID: <3cHNKQ0wrVz7LjR@mail.python.org> http://hg.python.org/cpython/rev/61227b4c169f changeset: 85231:61227b4c169f branch: 3.3 parent: 85228:9e4685d703d4 user: Ezio Melotti date: Sat Aug 17 16:57:41 2013 +0300 summary: #18466: fix more typos. Patch by F?vry Thibault. 
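The _subprocess.c hunk above is only a comment fix, but the conversion it documents, turning an environment mapping into the flat block that CreateProcess expects, is easy to picture at the Python level. A hedged sketch of that layout (the helper name is invented; the real conversion is done in C):

    def windows_environment_block(env):
        # CreateProcess wants "KEY=VALUE" entries, each terminated by a
        # NUL, with an extra NUL closing the whole block; entries are
        # conventionally sorted by name.
        return u"".join(u"%s=%s\0" % (key, value)
                        for key, value in sorted(env.items())) + u"\0"

    block = windows_environment_block({"FOO": "bar", "PATH": r"C:\Windows"})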
files: Include/datetime.h | 2 +- Lib/collections/__init__.py | 2 +- Lib/test/datetimetester.py | 2 +- Lib/test/test_ast.py | 2 +- Lib/test/test_http_cookiejar.py | 2 +- Lib/test/test_normalization.py | 2 +- Lib/test/test_urllib.py | 2 +- Misc/HISTORY | 2 +- Modules/_ssl.c | 2 +- Modules/_winapi.c | 2 +- Modules/unicodedata.c | 2 +- PCbuild/readme.txt | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Include/datetime.h b/Include/datetime.h --- a/Include/datetime.h +++ b/Include/datetime.h @@ -42,7 +42,7 @@ typedef struct { - PyObject_HEAD /* a pure abstract base clase */ + PyObject_HEAD /* a pure abstract base class */ } PyDateTime_TZInfo; diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -375,7 +375,7 @@ print(result._source) # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in enviroments where + # where the named tuple is created. Bypass this step in environments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython). try: diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -250,7 +250,7 @@ t.replace(tzinfo=tz).dst()) ############################################################################# -# Base clase for testing a particular aspect of timedelta, time, date and +# Base class for testing a particular aspect of timedelta, time, date and # datetime comparisons. class HarmlessMixedComparison: diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -22,7 +22,7 @@ # These tests are compiled through "exec" -# There should be atleast one test per statement +# There should be at least one test per statement exec_tests = [ # None "None", diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -374,7 +374,7 @@ ## commas and equals are commonly appear in the cookie value). This also ## means that if you fold multiple Set-Cookie header fields into one, ## comma-separated list, it'll be a headache to parse (at least my head -## starts hurting everytime I think of that code). +## starts hurting every time I think of that code). ## - Expires: You'll get all sorts of date formats in the expires, ## including emtpy expires attributes ("expires="). Be as flexible as you ## can, and certainly don't expect the weekday to be there; if you can't diff --git a/Lib/test/test_normalization.py b/Lib/test/test_normalization.py --- a/Lib/test/test_normalization.py +++ b/Lib/test/test_normalization.py @@ -59,7 +59,7 @@ c1,c2,c3,c4,c5 = [unistr(x) for x in line.split(';')[:-1]] except RangeError: # Skip unsupported characters; - # try atleast adding c1 if we are in part1 + # try at least adding c1 if we are in part1 if part == "@Part1": try: c1 = unistr(line.split(';')[0]) diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -1218,7 +1218,7 @@ # Everywhere else they work ok, but on those machines, sometimes # fail in one of the tests, sometimes in other. I have a linux, and # the tests go ok. -# If anybody has one of the problematic enviroments, please help! +# If anybody has one of the problematic environments, please help! # . 
Facundo # # def server(evt): diff --git a/Misc/HISTORY b/Misc/HISTORY --- a/Misc/HISTORY +++ b/Misc/HISTORY @@ -2806,7 +2806,7 @@ - Issue #1285086: Speed up urllib.parse functions: quote, quote_from_bytes, unquote, unquote_to_bytes. -- Issue #8688: Distutils now recalculates MANIFEST everytime. +- Issue #8688: Distutils now recalculates MANIFEST every time. - Issue #8477: ssl.RAND_egd() and ssl._test_decode_cert() support str with surrogates and bytes for the filename. diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -1564,7 +1564,7 @@ * Otherwise OpenSSL might read in too much data, * eating clear text data that happens to be * transmitted after the SSL shutdown. - * Should be safe to call repeatedly everytime this + * Should be safe to call repeatedly every time this * function is used and the shutdown_seen_zero != 0 * condition is met. */ diff --git a/Modules/_winapi.c b/Modules/_winapi.c --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -489,7 +489,7 @@ Py_UCS4 *buffer = NULL, *p, *end; PyObject *keys, *values, *res; - /* convert environment dictionary to windows enviroment string */ + /* convert environment dictionary to windows environment string */ if (! PyMapping_Check(environment)) { PyErr_SetString( PyExc_TypeError, "environment must be dictionary or None"); diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c --- a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -524,7 +524,7 @@ while(stackptr) { Py_UCS4 code = stack[--stackptr]; /* Hangul Decomposition adds three characters in - a single step, so we need atleast that much room. */ + a single step, so we need at least that much room. */ if (space < 3) { Py_UCS4 *new_output; osize += 10; diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -265,7 +265,7 @@ In addition, you need the Visual Studio plugin for external C compilers, from http://sf.net/projects/vsextcomp. The plugin will wrap cl.exe, to locate the proper target compiler, and convert compiler options -accordingly. The project files require atleast version 0.9. +accordingly. The project files require at least version 0.9. Building for AMD64 ------------------ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 15:58:35 2013 From: python-checkins at python.org (ezio.melotti) Date: Sat, 17 Aug 2013 15:58:35 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogIzE4NDY2OiBtZXJnZSB3aXRoIDMuMy4=?= Message-ID: <3cHNKR4Vkcz7Ljb@mail.python.org> http://hg.python.org/cpython/rev/4cc308acd26d changeset: 85232:4cc308acd26d parent: 85229:b3236989f869 parent: 85231:61227b4c169f user: Ezio Melotti date: Sat Aug 17 16:58:13 2013 +0300 summary: #18466: merge with 3.3. 
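The unicodedata.c comment fixed above ("we need at least that much room") reflects a property that is easy to verify from Python: canonical decomposition of a single precomposed Hangul syllable can produce three jamo code points, which is precisely the head-room the C decomposition loop reserves. A quick demonstration (U+AC01 is just a convenient LVT syllable):

    import unicodedata

    syllable = u"\uAC01"  # HANGUL SYLLABLE GAG
    decomposed = unicodedata.normalize("NFD", syllable)
    print(len(decomposed))                      # 3
    print([hex(ord(ch)) for ch in decomposed])  # ['0x1100', '0x1161', '0x11a8']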
files: Include/datetime.h | 2 +- Lib/collections/__init__.py | 2 +- Lib/test/datetimetester.py | 2 +- Lib/test/test_ast.py | 2 +- Lib/test/test_http_cookiejar.py | 2 +- Lib/test/test_normalization.py | 2 +- Lib/test/test_urllib.py | 2 +- Misc/HISTORY | 2 +- Modules/_ssl.c | 2 +- Modules/_winapi.c | 2 +- Modules/unicodedata.c | 2 +- PCbuild/readme.txt | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Include/datetime.h b/Include/datetime.h --- a/Include/datetime.h +++ b/Include/datetime.h @@ -42,7 +42,7 @@ typedef struct { - PyObject_HEAD /* a pure abstract base clase */ + PyObject_HEAD /* a pure abstract base class */ } PyDateTime_TZInfo; diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -372,7 +372,7 @@ print(result._source) # For pickling to work, the __module__ variable needs to be set to the frame - # where the named tuple is created. Bypass this step in enviroments where + # where the named tuple is created. Bypass this step in environments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython). try: diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -250,7 +250,7 @@ t.replace(tzinfo=tz).dst()) ############################################################################# -# Base clase for testing a particular aspect of timedelta, time, date and +# Base class for testing a particular aspect of timedelta, time, date and # datetime comparisons. class HarmlessMixedComparison: diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -22,7 +22,7 @@ # These tests are compiled through "exec" -# There should be atleast one test per statement +# There should be at least one test per statement exec_tests = [ # None "None", diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -374,7 +374,7 @@ ## commas and equals are commonly appear in the cookie value). This also ## means that if you fold multiple Set-Cookie header fields into one, ## comma-separated list, it'll be a headache to parse (at least my head -## starts hurting everytime I think of that code). +## starts hurting every time I think of that code). ## - Expires: You'll get all sorts of date formats in the expires, ## including emtpy expires attributes ("expires="). Be as flexible as you ## can, and certainly don't expect the weekday to be there; if you can't diff --git a/Lib/test/test_normalization.py b/Lib/test/test_normalization.py --- a/Lib/test/test_normalization.py +++ b/Lib/test/test_normalization.py @@ -59,7 +59,7 @@ c1,c2,c3,c4,c5 = [unistr(x) for x in line.split(';')[:-1]] except RangeError: # Skip unsupported characters; - # try atleast adding c1 if we are in part1 + # try at least adding c1 if we are in part1 if part == "@Part1": try: c1 = unistr(line.split(';')[0]) diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -1294,7 +1294,7 @@ # Everywhere else they work ok, but on those machines, sometimes # fail in one of the tests, sometimes in other. I have a linux, and # the tests go ok. -# If anybody has one of the problematic enviroments, please help! +# If anybody has one of the problematic environments, please help! # . 
Facundo # # def server(evt): diff --git a/Misc/HISTORY b/Misc/HISTORY --- a/Misc/HISTORY +++ b/Misc/HISTORY @@ -6903,7 +6903,7 @@ - Issue #1285086: Speed up urllib.parse functions: quote, quote_from_bytes, unquote, unquote_to_bytes. -- Issue #8688: Distutils now recalculates MANIFEST everytime. +- Issue #8688: Distutils now recalculates MANIFEST every time. - Issue #8477: ssl.RAND_egd() and ssl._test_decode_cert() support str with surrogates and bytes for the filename. diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -1638,7 +1638,7 @@ * Otherwise OpenSSL might read in too much data, * eating clear text data that happens to be * transmitted after the SSL shutdown. - * Should be safe to call repeatedly everytime this + * Should be safe to call repeatedly every time this * function is used and the shutdown_seen_zero != 0 * condition is met. */ diff --git a/Modules/_winapi.c b/Modules/_winapi.c --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -491,7 +491,7 @@ Py_UCS4 *buffer = NULL, *p, *end; PyObject *keys, *values, *res; - /* convert environment dictionary to windows enviroment string */ + /* convert environment dictionary to windows environment string */ if (! PyMapping_Check(environment)) { PyErr_SetString( PyExc_TypeError, "environment must be dictionary or None"); diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c --- a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -524,7 +524,7 @@ while(stackptr) { Py_UCS4 code = stack[--stackptr]; /* Hangul Decomposition adds three characters in - a single step, so we need atleast that much room. */ + a single step, so we need at least that much room. */ if (space < 3) { Py_UCS4 *new_output; osize += 10; diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -265,7 +265,7 @@ In addition, you need the Visual Studio plugin for external C compilers, from http://sf.net/projects/vsextcomp. The plugin will wrap cl.exe, to locate the proper target compiler, and convert compiler options -accordingly. The project files require atleast version 0.9. +accordingly. The project files require at least version 0.9. Building for AMD64 ------------------ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 17:19:54 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 17:19:54 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzY4?= =?utf-8?q?=3A_coding_style_nitpick=2E_Thanks_to_Vajrasky_Kok?= Message-ID: <3cHQ7G4vCGz7LjW@mail.python.org> http://hg.python.org/cpython/rev/b352a5cb60b6 changeset: 85233:b352a5cb60b6 branch: 3.3 parent: 85231:61227b4c169f user: Christian Heimes date: Sat Aug 17 17:18:56 2013 +0200 summary: Issue #18768: coding style nitpick. 
Thanks to Vajrasky Kok files: Modules/_ssl.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -776,7 +776,7 @@ ASN1_STRING *as = NULL; name = sk_GENERAL_NAME_value(names, j); - gntype = name-> type; + gntype = name->type; switch (gntype) { case GEN_DIRNAME: /* we special-case DirName as a tuple of -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 17:19:55 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 17:19:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318768=3A_coding_style_nitpick=2E_Thanks_to_Vajr?= =?utf-8?q?asky_Kok?= Message-ID: <3cHQ7H6m3cz7LkH@mail.python.org> http://hg.python.org/cpython/rev/fe444f324756 changeset: 85234:fe444f324756 parent: 85232:4cc308acd26d parent: 85233:b352a5cb60b6 user: Christian Heimes date: Sat Aug 17 17:19:03 2013 +0200 summary: Issue #18768: coding style nitpick. Thanks to Vajrasky Kok files: Modules/_ssl.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -810,7 +810,7 @@ ASN1_STRING *as = NULL; name = sk_GENERAL_NAME_value(names, j); - gntype = name-> type; + gntype = name->type; switch (gntype) { case GEN_DIRNAME: /* we special-case DirName as a tuple of -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 17:19:57 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 17:19:57 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzY4?= =?utf-8?q?=3A_coding_style_nitpick=2E_Thanks_to_Vajrasky_Kok?= Message-ID: <3cHQ7K1S59z7Lk3@mail.python.org> http://hg.python.org/cpython/rev/a8787a6fa107 changeset: 85235:a8787a6fa107 branch: 2.7 parent: 85230:c75b8d5fa016 user: Christian Heimes date: Sat Aug 17 17:18:56 2013 +0200 summary: Issue #18768: coding style nitpick. Thanks to Vajrasky Kok files: Modules/_ssl.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -743,7 +743,7 @@ ASN1_STRING *as = NULL; name = sk_GENERAL_NAME_value(names, j); - gntype = name-> type; + gntype = name->type; switch (gntype) { case GEN_DIRNAME: /* we special-case DirName as a tuple of -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 17:26:17 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 17:26:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgMTg3Njg6?= =?utf-8?q?_Correct_doc_string_of_RAND=5Fedg=28=29=2E_Patch_by_Vajrasky_Ko?= =?utf-8?q?k=2E?= Message-ID: <3cHQGd4yDSzT2B@mail.python.org> http://hg.python.org/cpython/rev/ae91252943bf changeset: 85236:ae91252943bf branch: 3.3 parent: 85233:b352a5cb60b6 user: Christian Heimes date: Sat Aug 17 17:25:18 2013 +0200 summary: Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. files: Misc/NEWS | 2 ++ Modules/_ssl.c | 2 +- 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,8 @@ Library ------- +- Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. + - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. 
diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -2576,7 +2576,7 @@ \n\ Queries the entropy gather daemon (EGD) on the socket named by 'path'.\n\ Returns number of bytes read. Raises SSLError if connection to EGD\n\ -fails or if it does provide enough data to seed PRNG."); +fails or if it does not provide enough data to seed PRNG."); #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 17:26:18 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 17:26:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_18768=3A_Correct_doc_string_of_RAND=5Fedg=28=29=2E?= =?utf-8?q?_Patch_by_Vajrasky_Kok=2E?= Message-ID: <3cHQGf70qkz7Ljl@mail.python.org> http://hg.python.org/cpython/rev/5c091acc799f changeset: 85237:5c091acc799f parent: 85234:fe444f324756 parent: 85236:ae91252943bf user: Christian Heimes date: Sat Aug 17 17:25:27 2013 +0200 summary: Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. files: Misc/NEWS | 2 ++ Modules/_ssl.c | 2 +- 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,6 +28,8 @@ Library ------- +- Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. + - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -2934,7 +2934,7 @@ \n\ Queries the entropy gather daemon (EGD) on the socket named by 'path'.\n\ Returns number of bytes read. Raises SSLError if connection to EGD\n\ -fails or if it does provide enough data to seed PRNG."); +fails or if it does not provide enough data to seed PRNG."); #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 17:26:20 2013 From: python-checkins at python.org (christian.heimes) Date: Sat, 17 Aug 2013 17:26:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgMTg3Njg6?= =?utf-8?q?_Correct_doc_string_of_RAND=5Fedg=28=29=2E_Patch_by_Vajrasky_Ko?= =?utf-8?q?k=2E?= Message-ID: <3cHQGh2HM4z7Ljt@mail.python.org> http://hg.python.org/cpython/rev/31389495cdbf changeset: 85238:31389495cdbf branch: 2.7 parent: 85235:a8787a6fa107 user: Christian Heimes date: Sat Aug 17 17:25:18 2013 +0200 summary: Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. files: Misc/NEWS | 2 ++ Modules/_ssl.c | 2 +- 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,8 @@ Library ------- +- Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. + - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -1619,7 +1619,7 @@ \n\ Queries the entropy gather daemon (EGD) on the socket named by 'path'.\n\ Returns number of bytes read. 
Raises SSLError if connection to EGD\n\ -fails or if it does provide enough data to seed PRNG."); +fails or if it does not provide enough data to seed PRNG."); #endif -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 20:33:11 2013 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 17 Aug 2013 20:33:11 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2316105=3A_When_a_s?= =?utf-8?q?ignal_handler_fails_to_write_to_the_file_descriptor?= Message-ID: <3cHVQH70vfzRTm@mail.python.org> http://hg.python.org/cpython/rev/e2b234f5bf7d changeset: 85239:e2b234f5bf7d parent: 85237:5c091acc799f user: Antoine Pitrou date: Sat Aug 17 20:27:56 2013 +0200 summary: Issue #16105: When a signal handler fails to write to the file descriptor registered with ``signal.set_wakeup_fd()``, report an exception instead of ignoring the error. files: Lib/test/test_signal.py | 41 +++++++++++++++++++++++++++++ Misc/NEWS | 4 ++ Modules/signalmodule.c | 18 ++++++++++++- 3 files changed, 62 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -275,6 +275,47 @@ assert_python_ok('-c', code) + def test_wakeup_write_error(self): + # Issue #16105: write() errors in the C signal handler should not + # pass silently. + # Use a subprocess to have only one thread. + code = """if 1: + import errno + import fcntl + import os + import signal + import sys + import time + from test.support import captured_stderr + + def handler(signum, frame): + 1/0 + + signal.signal(signal.SIGALRM, handler) + r, w = os.pipe() + flags = fcntl.fcntl(r, fcntl.F_GETFL, 0) + fcntl.fcntl(r, fcntl.F_SETFL, flags | os.O_NONBLOCK) + + # Set wakeup_fd a read-only file descriptor to trigger the error + signal.set_wakeup_fd(r) + try: + with captured_stderr() as err: + signal.alarm(1) + time.sleep(5.0) + except ZeroDivisionError: + # An ignored exception should have been printed out on stderr + err = err.getvalue() + if ('Exception ignored when trying to write to the signal wakeup fd' + not in err): + raise AssertionError(err) + if ('OSError: [Errno %d]' % errno.EBADF) not in err: + raise AssertionError(err) + else: + raise AssertionError("ZeroDivisionError not raised") + """ + + assert_python_ok('-c', code) + def test_wakeup_fd_early(self): self.check_wakeup("""def test(): import select diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,10 @@ Core and Builtins ----------------- +- Issue #16105: When a signal handler fails to write to the file descriptor + registered with ``signal.set_wakeup_fd()``, report an exception instead + of ignoring the error. + - Issue #18722: Remove uses of the "register" keyword in C code. - Issue #18667: Add missing "HAVE_FCHOWNAT" symbol to posix._have_functions. 
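For readers unfamiliar with the mechanism this change touches: the C-level signal handler writes the signal number as a single byte to the descriptor registered with signal.set_wakeup_fd(), and an event loop drains it later. A minimal sketch of the usual registration pattern follows (not part of this changeset; the helper name is illustrative, and it assumes a Unix pipe, as in the test above):

    import fcntl
    import os
    import signal

    def install_wakeup_fd():
        # Create a pipe and make both ends non-blocking; the C handler
        # must never block when it writes the signal number.
        r, w = os.pipe()
        for fd in (r, w):
            flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0)
            fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
        # Register the *write* end; set_wakeup_fd() must be called
        # from the main thread.
        signal.set_wakeup_fd(w)
        return r   # select()/poll() on r, then os.read(r, 512) to drain it

With this patch, a failed write() to that descriptor produces the "Exception ignored when trying to write to the signal wakeup fd" report instead of being silently dropped.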
diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -175,15 +175,31 @@ return PyErr_CheckSignals(); } +static int +report_wakeup_error(void *data) +{ + int save_errno = errno; + errno = (int) (Py_intptr_t) data; + PyErr_SetFromErrno(PyExc_OSError); + PySys_WriteStderr("Exception ignored when trying to write to the " + "signal wakeup fd:\n"); + PyErr_WriteUnraisable(NULL); + errno = save_errno; + return 0; +} + static void trip_signal(int sig_num) { unsigned char byte; + int rc = 0; Handlers[sig_num].tripped = 1; if (wakeup_fd != -1) { byte = (unsigned char)sig_num; - write(wakeup_fd, &byte, 1); + while ((rc = write(wakeup_fd, &byte, 1)) == -1 && errno == EINTR); + if (rc == -1) + Py_AddPendingCall(report_wakeup_error, (void *) (Py_intptr_t) errno); } if (is_tripped) return; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Aug 17 21:44:06 2013 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 17 Aug 2013 21:44:06 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Make_test=5Fwakeup=5Fwrite?= =?utf-8?q?=5Ferror_more_robust?= Message-ID: <3cHX066qX5zSgZ@mail.python.org> http://hg.python.org/cpython/rev/f2d955afad8a changeset: 85240:f2d955afad8a user: Antoine Pitrou date: Sat Aug 17 21:43:47 2013 +0200 summary: Make test_wakeup_write_error more robust (trying to fix a failure on the FreeBSD 9.0 buildbot) files: Lib/test/test_signal.py | 10 ++++++++++ 1 files changed, 10 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -313,6 +313,16 @@ else: raise AssertionError("ZeroDivisionError not raised") """ + r, w = os.pipe() + try: + os.write(r, b'x') + except OSError: + pass + else: + self.skipTest("OS doesn't report write() error on the read end of a pipe") + finally: + os.close(r) + os.close(w) assert_python_ok('-c', code) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 18 03:16:30 2013 From: python-checkins at python.org (christian.heimes) Date: Sun, 18 Aug 2013 03:16:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_add_missing_?= =?utf-8?q?=23?= Message-ID: <3cHgMf24T9z7LjR@mail.python.org> http://hg.python.org/cpython/rev/1b7ec1cd6f61 changeset: 85241:1b7ec1cd6f61 branch: 3.3 parent: 85236:ae91252943bf user: Christian Heimes date: Sun Aug 18 03:11:11 2013 +0200 summary: add missing # files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,7 +66,7 @@ Library ------- -- Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. +- Issue #18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 18 03:16:31 2013 From: python-checkins at python.org (christian.heimes) Date: Sun, 18 Aug 2013 03:16:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_add_missing_=23?= Message-ID: <3cHgMg476vz7LjR@mail.python.org> http://hg.python.org/cpython/rev/a868027d97eb changeset: 85242:a868027d97eb parent: 85237:5c091acc799f parent: 85241:1b7ec1cd6f61 user: Christian Heimes date: Sun Aug 18 03:11:47 2013 +0200 summary: add missing # files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -28,7 +28,7 @@ Library ------- -- Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. +- Issue #18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 18 03:16:32 2013 From: python-checkins at python.org (christian.heimes) Date: Sun, 18 Aug 2013 03:16:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_add_missing_?= =?utf-8?q?=23?= Message-ID: <3cHgMh66Rcz7LjS@mail.python.org> http://hg.python.org/cpython/rev/72c7a4cd4f55 changeset: 85243:72c7a4cd4f55 branch: 2.7 parent: 85238:31389495cdbf user: Christian Heimes date: Sun Aug 18 03:11:11 2013 +0200 summary: add missing # files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,7 +29,7 @@ Library ------- -- Issue 18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. +- Issue #18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke malloc weak symbols. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Aug 18 03:16:34 2013 From: python-checkins at python.org (christian.heimes) Date: Sun, 18 Aug 2013 03:16:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?q?=29=3A_merge?= Message-ID: <3cHgMk222Hz7LjR@mail.python.org> http://hg.python.org/cpython/rev/76ba4a4b4fe9 changeset: 85244:76ba4a4b4fe9 parent: 85242:a868027d97eb parent: 85240:f2d955afad8a user: Christian Heimes date: Sun Aug 18 03:16:16 2013 +0200 summary: merge files: Lib/test/test_signal.py | 51 +++++++++++++++++++++++++++++ Misc/NEWS | 4 ++ Modules/signalmodule.c | 18 +++++++++- 3 files changed, 72 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -275,6 +275,57 @@ assert_python_ok('-c', code) + def test_wakeup_write_error(self): + # Issue #16105: write() errors in the C signal handler should not + # pass silently. + # Use a subprocess to have only one thread. 
+ code = """if 1: + import errno + import fcntl + import os + import signal + import sys + import time + from test.support import captured_stderr + + def handler(signum, frame): + 1/0 + + signal.signal(signal.SIGALRM, handler) + r, w = os.pipe() + flags = fcntl.fcntl(r, fcntl.F_GETFL, 0) + fcntl.fcntl(r, fcntl.F_SETFL, flags | os.O_NONBLOCK) + + # Set wakeup_fd a read-only file descriptor to trigger the error + signal.set_wakeup_fd(r) + try: + with captured_stderr() as err: + signal.alarm(1) + time.sleep(5.0) + except ZeroDivisionError: + # An ignored exception should have been printed out on stderr + err = err.getvalue() + if ('Exception ignored when trying to write to the signal wakeup fd' + not in err): + raise AssertionError(err) + if ('OSError: [Errno %d]' % errno.EBADF) not in err: + raise AssertionError(err) + else: + raise AssertionError("ZeroDivisionError not raised") + """ + r, w = os.pipe() + try: + os.write(r, b'x') + except OSError: + pass + else: + self.skipTest("OS doesn't report write() error on the read end of a pipe") + finally: + os.close(r) + os.close(w) + + assert_python_ok('-c', code) + def test_wakeup_fd_early(self): self.check_wakeup("""def test(): import select diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,10 @@ Core and Builtins ----------------- +- Issue #16105: When a signal handler fails to write to the file descriptor + registered with ``signal.set_wakeup_fd()``, report an exception instead + of ignoring the error. + - Issue #18722: Remove uses of the "register" keyword in C code. - Issue #18667: Add missing "HAVE_FCHOWNAT" symbol to posix._have_functions. diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -175,15 +175,31 @@ return PyErr_CheckSignals(); } +static int +report_wakeup_error(void *data) +{ + int save_errno = errno; + errno = (int) (Py_intptr_t) data; + PyErr_SetFromErrno(PyExc_OSError); + PySys_WriteStderr("Exception ignored when trying to write to the " + "signal wakeup fd:\n"); + PyErr_WriteUnraisable(NULL); + errno = save_errno; + return 0; +} + static void trip_signal(int sig_num) { unsigned char byte; + int rc = 0; Handlers[sig_num].tripped = 1; if (wakeup_fd != -1) { byte = (unsigned char)sig_num; - write(wakeup_fd, &byte, 1); + while ((rc = write(wakeup_fd, &byte, 1)) == -1 && errno == EINTR); + if (rc == -1) + Py_AddPendingCall(report_wakeup_error, (void *) (Py_intptr_t) errno); } if (is_tripped) return; -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sun Aug 18 06:11:34 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 18 Aug 2013 06:11:34 +0200 Subject: [Python-checkins] Daily reference leaks (76ba4a4b4fe9): sum=0 Message-ID: results for 76ba4a4b4fe9 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogA5iXtX', '-x'] From python-checkins at python.org Sun Aug 18 12:43:33 2013 From: python-checkins at python.org (christian.heimes) Date: Sun, 18 Aug 2013 12:43:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318774=3A_Remove_l?= =?utf-8?q?ast_bits_of_GNU_PTH_thread_code=2C_patch_by_Vajrasky_Kok=2E?= Message-ID: <3cHvxx1yxWz7LjP@mail.python.org> http://hg.python.org/cpython/rev/646c2388d8f5 changeset: 85245:646c2388d8f5 user: Christian Heimes date: Sun Aug 18 12:43:24 2013 +0200 summary: Issue #18774: Remove last bits of GNU PTH thread code, patch 
by Vajrasky Kok. files: Misc/NEWS | 2 ++ Modules/signalmodule.c | 12 ------------ 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #18774: Remove last bits of GNU PTH thread code and thread_pth.h. + - Issue #16105: When a signal handler fails to write to the file descriptor registered with ``signal.set_wakeup_fd()``, report an exception instead of ignoring the error. diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -73,10 +73,6 @@ a working implementation that works in all three cases -- the handler ignores signals if getpid() isn't the same as in the main thread. XXX This is a hack. - - GNU pth is a user-space threading library, and as such, all threads - run within the same process. In this case, if the currently running - thread is not the main_thread, send the signal to the main_thread. */ #ifdef WITH_THREAD @@ -214,13 +210,6 @@ { int save_errno = errno; -#if defined(WITH_THREAD) && defined(WITH_PTH) - if (PyThread_get_thread_ident() != main_thread) { - pth_raise(*(pth_t *) main_thread, sig_num); - } - else -#endif - { #ifdef WITH_THREAD /* See NOTES section above */ if (getpid() == main_pid) @@ -242,7 +231,6 @@ * makes this true. See also issue8354. */ PyOS_setsig(sig_num, signal_handler); #endif - } /* Issue #10311: asynchronously executing signal handlers should not mutate errno under the feet of unsuspecting C code. */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 00:29:23 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 00:29:23 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NTky?= =?utf-8?q?=3A_Add_docstrings_to_file_being_tested_=28idlelib=2ESearchDial?= =?utf-8?b?b2dCYXNlLnB5KS4=?= Message-ID: <3cJCcM3NGgz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/0d2b87ec9f2b changeset: 85246:0d2b87ec9f2b branch: 2.7 parent: 85243:72c7a4cd4f55 user: Terry Jan Reedy date: Sun Aug 18 18:22:34 2013 -0400 summary: Issue #18592: Add docstrings to file being tested (idlelib.SearchDialogBase.py). files: Lib/idlelib/SearchDialogBase.py | 17 +++++++++++++++++ 1 files changed, 17 insertions(+), 0 deletions(-) diff --git a/Lib/idlelib/SearchDialogBase.py b/Lib/idlelib/SearchDialogBase.py --- a/Lib/idlelib/SearchDialogBase.py +++ b/Lib/idlelib/SearchDialogBase.py @@ -1,6 +1,23 @@ +'''Define SearchDialogBase used by Search, Replace, and Grep dialogs.''' from Tkinter import * class SearchDialogBase: + '''Create most of a modal search dialog (make_frame, create_widgets). + + The wide left column contains: + 1 or 2 text entry lines (create_entries, make_entry); + a row of standard radiobuttons (create_option_buttons); + a row of dialog specific radiobuttons (create_other_buttons). + + The narrow right column contains command buttons + (create_command_buttons, make_button). + These are bound to functions that execute the command. + + Except for command buttons, this base class is not limited to + items common to all three subclasses. Rather, it is the Find dialog + minus the "Find Next" command and its execution function. + The other dialogs override methods to replace and add widgets. 
+ ''' title = "Search Dialog" icon = "Search" -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 00:29:24 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 00:29:24 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NTky?= =?utf-8?q?=3A_Add_docstrings_to_file_being_tested_=28idlelib=2ESearchDial?= =?utf-8?b?b2dCYXNlLnB5KS4=?= Message-ID: <3cJCcN5F2zz7Ljc@mail.python.org> http://hg.python.org/cpython/rev/e09dfdbeb9a2 changeset: 85247:e09dfdbeb9a2 branch: 3.3 parent: 85241:1b7ec1cd6f61 user: Terry Jan Reedy date: Sun Aug 18 18:22:43 2013 -0400 summary: Issue #18592: Add docstrings to file being tested (idlelib.SearchDialogBase.py). files: Lib/idlelib/SearchDialogBase.py | 17 +++++++++++++++++ 1 files changed, 17 insertions(+), 0 deletions(-) diff --git a/Lib/idlelib/SearchDialogBase.py b/Lib/idlelib/SearchDialogBase.py --- a/Lib/idlelib/SearchDialogBase.py +++ b/Lib/idlelib/SearchDialogBase.py @@ -1,6 +1,23 @@ +'''Define SearchDialogBase used by Search, Replace, and Grep dialogs.''' from tkinter import * class SearchDialogBase: + '''Create most of a modal search dialog (make_frame, create_widgets). + + The wide left column contains: + 1 or 2 text entry lines (create_entries, make_entry); + a row of standard radiobuttons (create_option_buttons); + a row of dialog specific radiobuttons (create_other_buttons). + + The narrow right column contains command buttons + (create_command_buttons, make_button). + These are bound to functions that execute the command. + + Except for command buttons, this base class is not limited to + items common to all three subclasses. Rather, it is the Find dialog + minus the "Find Next" command and its execution function. + The other dialogs override methods to replace and add widgets. + ''' title = "Search Dialog" icon = "Search" -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 00:29:25 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 00:29:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3cJCcP75Nzz7Ljr@mail.python.org> http://hg.python.org/cpython/rev/3deccc7eee1b changeset: 85248:3deccc7eee1b parent: 85245:646c2388d8f5 parent: 85247:e09dfdbeb9a2 user: Terry Jan Reedy date: Sun Aug 18 18:23:10 2013 -0400 summary: Merge with 3.3 files: Lib/idlelib/SearchDialogBase.py | 17 +++++++++++++++++ 1 files changed, 17 insertions(+), 0 deletions(-) diff --git a/Lib/idlelib/SearchDialogBase.py b/Lib/idlelib/SearchDialogBase.py --- a/Lib/idlelib/SearchDialogBase.py +++ b/Lib/idlelib/SearchDialogBase.py @@ -1,6 +1,23 @@ +'''Define SearchDialogBase used by Search, Replace, and Grep dialogs.''' from tkinter import * class SearchDialogBase: + '''Create most of a modal search dialog (make_frame, create_widgets). + + The wide left column contains: + 1 or 2 text entry lines (create_entries, make_entry); + a row of standard radiobuttons (create_option_buttons); + a row of dialog specific radiobuttons (create_other_buttons). + + The narrow right column contains command buttons + (create_command_buttons, make_button). + These are bound to functions that execute the command. + + Except for command buttons, this base class is not limited to + items common to all three subclasses. Rather, it is the Find dialog + minus the "Find Next" command and its execution function. 
+ The other dialogs override methods to replace and add widgets. + ''' title = "Search Dialog" icon = "Search" -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 00:29:27 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 00:29:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NTky?= =?utf-8?q?=3A_whitespace?= Message-ID: <3cJCcR1tz3z7Ljg@mail.python.org> http://hg.python.org/cpython/rev/358a2b2fbad4 changeset: 85249:358a2b2fbad4 branch: 3.3 parent: 85247:e09dfdbeb9a2 user: Terry Jan Reedy date: Sun Aug 18 18:27:02 2013 -0400 summary: Issue #18592: whitespace files: Lib/idlelib/SearchDialogBase.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/idlelib/SearchDialogBase.py b/Lib/idlelib/SearchDialogBase.py --- a/Lib/idlelib/SearchDialogBase.py +++ b/Lib/idlelib/SearchDialogBase.py @@ -8,7 +8,7 @@ 1 or 2 text entry lines (create_entries, make_entry); a row of standard radiobuttons (create_option_buttons); a row of dialog specific radiobuttons (create_other_buttons). - + The narrow right column contains command buttons (create_command_buttons, make_button). These are bound to functions that execute the command. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 00:29:28 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 00:29:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3cJCcS3f3Jz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/85e64d9021fd changeset: 85250:85e64d9021fd parent: 85248:3deccc7eee1b parent: 85249:358a2b2fbad4 user: Terry Jan Reedy date: Sun Aug 18 18:27:27 2013 -0400 summary: Merge with 3.3 files: Lib/idlelib/SearchDialogBase.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/idlelib/SearchDialogBase.py b/Lib/idlelib/SearchDialogBase.py --- a/Lib/idlelib/SearchDialogBase.py +++ b/Lib/idlelib/SearchDialogBase.py @@ -8,7 +8,7 @@ 1 or 2 text entry lines (create_entries, make_entry); a row of standard radiobuttons (create_option_buttons); a row of dialog specific radiobuttons (create_other_buttons). - + The narrow right column contains command buttons (create_command_buttons, make_button). These are bound to functions that execute the command. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 00:29:29 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 00:29:29 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NTky?= =?utf-8?q?=3A_whitespace?= Message-ID: <3cJCcT6zL3z7Lk7@mail.python.org> http://hg.python.org/cpython/rev/1f2b78941c4a changeset: 85251:1f2b78941c4a branch: 2.7 parent: 85246:0d2b87ec9f2b user: Terry Jan Reedy date: Sun Aug 18 18:27:02 2013 -0400 summary: Issue #18592: whitespace files: Lib/idlelib/SearchDialogBase.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/idlelib/SearchDialogBase.py b/Lib/idlelib/SearchDialogBase.py --- a/Lib/idlelib/SearchDialogBase.py +++ b/Lib/idlelib/SearchDialogBase.py @@ -8,7 +8,7 @@ 1 or 2 text entry lines (create_entries, make_entry); a row of standard radiobuttons (create_option_buttons); a row of dialog specific radiobuttons (create_other_buttons). - + The narrow right column contains command buttons (create_command_buttons, make_button). 
These are bound to functions that execute the command. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 00:57:34 2013 From: python-checkins at python.org (andrew.kuchling) Date: Mon, 19 Aug 2013 00:57:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4NTYyOiB2YXJp?= =?utf-8?q?ous_revisions_to_the_regex_howto_for_3=2Ex?= Message-ID: <3cJDDt0Hryz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/366ca21600c9 changeset: 85252:366ca21600c9 branch: 3.3 parent: 85249:358a2b2fbad4 user: Andrew Kuchling date: Sun Aug 18 18:57:22 2013 -0400 summary: #18562: various revisions to the regex howto for 3.x * describe how \w is different when used in bytes and Unicode patterns. * describe re.ASCII flag to change that behaviour. * remove personal references ('I generally prefer...') * add some more links to the re module in the library reference * various small edits and re-wording. files: Doc/howto/regex.rst | 128 +++++++++++++++----------------- 1 files changed, 60 insertions(+), 68 deletions(-) diff --git a/Doc/howto/regex.rst b/Doc/howto/regex.rst --- a/Doc/howto/regex.rst +++ b/Doc/howto/regex.rst @@ -104,13 +104,25 @@ or ``\``, you can precede them with a backslash to remove their special meaning: ``\[`` or ``\\``. -Some of the special sequences beginning with ``'\'`` represent predefined sets -of characters that are often useful, such as the set of digits, the set of -letters, or the set of anything that isn't whitespace. The following predefined -special sequences are a subset of those available. The equivalent classes are -for bytes patterns. For a complete list of sequences and expanded class -definitions for Unicode string patterns, see the last part of -:ref:`Regular Expression Syntax `. +Some of the special sequences beginning with ``'\'`` represent +predefined sets of characters that are often useful, such as the set +of digits, the set of letters, or the set of anything that isn't +whitespace. + +Let's take an example: ``\w`` matches any alphanumeric character. If +the regex pattern is expressed in bytes, this is equivalent to the +class ``[a-zA-Z0-9_]``. If the regex pattern is a string, ``\w`` will +match all the characters marked as letters in the Unicode database +provided by the :mod:`unicodedata` module. You can use the more +restricted definition of ``\w`` in a string pattern by supplying the +:const:`re.ASCII` flag when compiling the regular expression. + +The following list of special sequences isn't complete. For a complete +list of sequences and expanded class definitions for Unicode string +patterns, see the last part of :ref:`Regular Expression Syntax +` in the Standard Library reference. In general, the +Unicode versions match any character that's in the appropriate +category in the Unicode database. ``\d`` Matches any decimal digit; this is equivalent to the class ``[0-9]``. @@ -160,9 +172,8 @@ For example, ``ca*t`` will match ``ct`` (0 ``a`` characters), ``cat`` (1 ``a``), ``caaat`` (3 ``a`` characters), and so forth. The RE engine has various internal limitations stemming from the size of C's ``int`` type that will -prevent it from matching over 2 billion ``a`` characters; you probably don't -have enough memory to construct a string that large, so you shouldn't run into -that limit. +prevent it from matching over 2 billion ``a`` characters; patterns +are usually not written to match that much data. 
Repetitions such as ``*`` are :dfn:`greedy`; when repeating a RE, the matching engine will try to repeat it as many times as possible. If later portions of the @@ -353,7 +364,7 @@ | | returns them as an :term:`iterator`. | +------------------+-----------------------------------------------+ -:meth:`match` and :meth:`search` return ``None`` if no match can be found. If +:meth:`~re.regex.match` and :meth:`~re.regex.search` return ``None`` if no match can be found. If they're successful, a :ref:`match object ` instance is returned, containing information about the match: where it starts and ends, the substring it matched, and more. @@ -419,8 +430,8 @@ >>> m.span() (0, 5) -:meth:`group` returns the substring that was matched by the RE. :meth:`start` -and :meth:`end` return the starting and ending index of the match. :meth:`span` +:meth:`~re.match.group` returns the substring that was matched by the RE. :meth:`~re.match.start` +and :meth:`~re.match.end` return the starting and ending index of the match. :meth:`~re.match.span` returns both start and end indexes in a single tuple. Since the :meth:`match` method only checks if the RE matches at the start of a string, :meth:`start` will always be zero. However, the :meth:`search` method of patterns @@ -448,14 +459,14 @@ print('No match') Two pattern methods return all of the matches for a pattern. -:meth:`findall` returns a list of matching strings:: +:meth:`~re.regex.findall` returns a list of matching strings:: >>> p = re.compile('\d+') >>> p.findall('12 drummers drumming, 11 pipers piping, 10 lords a-leaping') ['12', '11', '10'] :meth:`findall` has to create the entire list before it can be returned as the -result. The :meth:`finditer` method returns a sequence of +result. The :meth:`~re.regex.finditer` method returns a sequence of :ref:`match object ` instances as an :term:`iterator`:: >>> iterator = p.finditer('12 drummers drumming, 11 ... 10 ...') @@ -473,9 +484,9 @@ ---------------------- You don't have to create a pattern object and call its methods; the -:mod:`re` module also provides top-level functions called :func:`match`, -:func:`search`, :func:`findall`, :func:`sub`, and so forth. These functions -take the same arguments as the corresponding pattern method, with +:mod:`re` module also provides top-level functions called :func:`~re.match`, +:func:`~re.search`, :func:`~re.findall`, :func:`~re.sub`, and so forth. These functions +take the same arguments as the corresponding pattern method with the RE string added as the first argument, and still return either ``None`` or a :ref:`match object ` instance. :: @@ -485,26 +496,15 @@ <_sre.SRE_Match object at 0x...> Under the hood, these functions simply create a pattern object for you -and call the appropriate method on it. They also store the compiled object in a -cache, so future calls using the same RE are faster. +and call the appropriate method on it. They also store the compiled +object in a cache, so future calls using the same RE won't need to +parse the pattern again and again. Should you use these module-level functions, or should you get the -pattern and call its methods yourself? That choice depends on how -frequently the RE will be used, and on your personal coding style. If the RE is -being used at only one point in the code, then the module functions are probably -more convenient. 
If a program contains a lot of regular expressions, or re-uses -the same ones in several locations, then it might be worthwhile to collect all -the definitions in one place, in a section of code that compiles all the REs -ahead of time. To take an example from the standard library, here's an extract -from the now-defunct Python 2 standard :mod:`xmllib` module:: - - ref = re.compile( ... ) - entityref = re.compile( ... ) - charref = re.compile( ... ) - starttagopen = re.compile( ... ) - -I generally prefer to work with the compiled object, even for one-time uses, but -few people will be as much of a purist about this as I am. +pattern and call its methods yourself? If you're accessing a regex +within a loop, pre-compiling it will save a few function calls. +Outside of loops, there's not much difference thanks to the internal +cache. Compilation Flags @@ -524,6 +524,10 @@ +---------------------------------+--------------------------------------------+ | Flag | Meaning | +=================================+============================================+ +| :const:`ASCII`, :const:`A` | Makes several escapes like ``\w``, ``\b``, | +| | ``\s`` and ``\d`` match only on ASCII | +| | characters with the respective property. | ++---------------------------------+--------------------------------------------+ | :const:`DOTALL`, :const:`S` | Make ``.`` match any character, including | | | newlines | +---------------------------------+--------------------------------------------+ @@ -535,11 +539,7 @@ | | ``$`` | +---------------------------------+--------------------------------------------+ | :const:`VERBOSE`, :const:`X` | Enable verbose REs, which can be organized | -| | more cleanly and understandably. | -+---------------------------------+--------------------------------------------+ -| :const:`ASCII`, :const:`A` | Makes several escapes like ``\w``, ``\b``, | -| | ``\s`` and ``\d`` match only on ASCII | -| | characters with the respective property. | +| (for 'extended') | more cleanly and understandably. | +---------------------------------+--------------------------------------------+ @@ -558,7 +558,8 @@ LOCALE :noindex: - Make ``\w``, ``\W``, ``\b``, and ``\B``, dependent on the current locale. + Make ``\w``, ``\W``, ``\b``, and ``\B``, dependent on the current locale + instead of the Unicode database. Locales are a feature of the C library intended to help in writing programs that take account of language differences. For example, if you're processing French @@ -851,11 +852,10 @@ problem. Both of them use a common syntax for regular expression extensions, so we'll look at that first. -Perl 5 added several additional features to standard regular expressions, and -the Python :mod:`re` module supports most of them. It would have been -difficult to choose new single-keystroke metacharacters or new special sequences -beginning with ``\`` to represent the new features without making Perl's regular -expressions confusingly different from standard REs. If you chose ``&`` as a +Perl 5 is well-known for its powerful additions to standard regular expressions. +For these new features the Perl developers couldn't choose new single-keystroke metacharacters +or new special sequences beginning with ``\`` without making Perl's regular +expressions confusingly different from standard REs. If they chose ``&`` as a new metacharacter, for example, old expressions would be assuming that ``&`` was a regular character and wouldn't have escaped it by writing ``\&`` or ``[&]``. 
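A quick illustration of the reworded ``\w`` behaviour above (a sketch for context, not text from the patch): a str pattern consults the Unicode database unless re.ASCII is passed, while a bytes pattern is always limited to [a-zA-Z0-9_].

    import re

    text = 'spam café'
    print(re.findall(r'\w+', text))                    # ['spam', 'café']  (Unicode letters)
    print(re.findall(r'\w+', text, re.ASCII))          # ['spam', 'caf']   (ASCII-only \w)
    print(re.findall(rb'\w+', text.encode('utf-8')))   # [b'spam', b'caf']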
@@ -867,22 +867,15 @@ assertion) and ``(?:foo)`` is something else (a non-capturing group containing the subexpression ``foo``). -Python adds an extension syntax to Perl's extension syntax. If the first -character after the question mark is a ``P``, you know that it's an extension -that's specific to Python. Currently there are two such extensions: -``(?P...)`` defines a named group, and ``(?P=name)`` is a backreference to -a named group. If future versions of Perl 5 add similar features using a -different syntax, the :mod:`re` module will be changed to support the new -syntax, while preserving the Python-specific syntax for compatibility's sake. +Python supports several of Perl's extensions and adds an extension +syntax to Perl's extension syntax. If the first character after the +question mark is a ``P``, you know that it's an extension that's +specific to Python. -Now that we've looked at the general extension syntax, we can return to the -features that simplify working with groups in complex REs. Since groups are -numbered from left to right and a complex expression may use many groups, it can -become difficult to keep track of the correct numbering. Modifying such a -complex RE is annoying, too: insert a new group near the beginning and you -change the numbers of everything that follows it. +Now that we've looked at the general extension syntax, we can return +to the features that simplify working with groups in complex REs. -Sometimes you'll want to use a group to collect a part of a regular expression, +Sometimes you'll want to use a group to denote a part of a regular expression, but aren't interested in retrieving the group's contents. You can make this fact explicit by using a non-capturing group: ``(?:...)``, where you can replace the ``...`` with any other regular expression. :: @@ -908,7 +901,7 @@ The syntax for a named group is one of the Python-specific extensions: ``(?P...)``. *name* is, obviously, the name of the group. Named groups -also behave exactly like capturing groups, and additionally associate a name +behave exactly like capturing groups, and additionally associate a name with a group. The :ref:`match object ` methods that deal with capturing groups all accept either integers that refer to the group by number or strings that contain the desired group's name. Named groups are still @@ -975,9 +968,10 @@ ``.*[.].*$`` Notice that the ``.`` needs to be treated specially because it's a -metacharacter; I've put it inside a character class. Also notice the trailing -``$``; this is added to ensure that all the rest of the string must be included -in the extension. This regular expression matches ``foo.bar`` and +metacharacter, so it's inside a character class to only match that +specific character. Also notice the trailing ``$``; this is added to +ensure that all the rest of the string must be included in the +extension. This regular expression matches ``foo.bar`` and ``autoexec.bat`` and ``sendmail.cf`` and ``printers.conf``. Now, consider complicating the problem a bit; what if you want to match @@ -1051,7 +1045,7 @@ The :meth:`split` method of a pattern splits a string apart wherever the RE matches, returning a list of the pieces. It's similar to the :meth:`split` method of strings but provides much more generality in the -delimiters that you can split by; :meth:`split` only supports splitting by +delimiters that you can split by; string :meth:`split` only supports splitting by whitespace or by a fixed string. 
As you'd expect, there's a module-level :func:`re.split` function, too. @@ -1106,7 +1100,6 @@ with a different string. The :meth:`sub` method takes a replacement value, which can be either a string or a function, and the string to be processed. - .. method:: .sub(replacement, string[, count=0]) :noindex: @@ -1362,4 +1355,3 @@ reference for programming in Python. (The first edition covered Python's now-removed :mod:`regex` module, which won't help you much.) Consider checking it out from your library. - -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 01:01:06 2013 From: python-checkins at python.org (andrew.kuchling) Date: Mon, 19 Aug 2013 01:01:06 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_from_3=2E3?= Message-ID: <3cJDJy2jKDz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/ae77b39e67fa changeset: 85253:ae77b39e67fa parent: 85250:85e64d9021fd parent: 85252:366ca21600c9 user: Andrew Kuchling date: Sun Aug 18 18:58:27 2013 -0400 summary: Merge from 3.3 files: Doc/howto/regex.rst | 128 +++++++++++++++----------------- 1 files changed, 60 insertions(+), 68 deletions(-) diff --git a/Doc/howto/regex.rst b/Doc/howto/regex.rst --- a/Doc/howto/regex.rst +++ b/Doc/howto/regex.rst @@ -104,13 +104,25 @@ or ``\``, you can precede them with a backslash to remove their special meaning: ``\[`` or ``\\``. -Some of the special sequences beginning with ``'\'`` represent predefined sets -of characters that are often useful, such as the set of digits, the set of -letters, or the set of anything that isn't whitespace. The following predefined -special sequences are a subset of those available. The equivalent classes are -for bytes patterns. For a complete list of sequences and expanded class -definitions for Unicode string patterns, see the last part of -:ref:`Regular Expression Syntax `. +Some of the special sequences beginning with ``'\'`` represent +predefined sets of characters that are often useful, such as the set +of digits, the set of letters, or the set of anything that isn't +whitespace. + +Let's take an example: ``\w`` matches any alphanumeric character. If +the regex pattern is expressed in bytes, this is equivalent to the +class ``[a-zA-Z0-9_]``. If the regex pattern is a string, ``\w`` will +match all the characters marked as letters in the Unicode database +provided by the :mod:`unicodedata` module. You can use the more +restricted definition of ``\w`` in a string pattern by supplying the +:const:`re.ASCII` flag when compiling the regular expression. + +The following list of special sequences isn't complete. For a complete +list of sequences and expanded class definitions for Unicode string +patterns, see the last part of :ref:`Regular Expression Syntax +` in the Standard Library reference. In general, the +Unicode versions match any character that's in the appropriate +category in the Unicode database. ``\d`` Matches any decimal digit; this is equivalent to the class ``[0-9]``. @@ -160,9 +172,8 @@ For example, ``ca*t`` will match ``ct`` (0 ``a`` characters), ``cat`` (1 ``a``), ``caaat`` (3 ``a`` characters), and so forth. The RE engine has various internal limitations stemming from the size of C's ``int`` type that will -prevent it from matching over 2 billion ``a`` characters; you probably don't -have enough memory to construct a string that large, so you shouldn't run into -that limit. 
+prevent it from matching over 2 billion ``a`` characters; patterns +are usually not written to match that much data. Repetitions such as ``*`` are :dfn:`greedy`; when repeating a RE, the matching engine will try to repeat it as many times as possible. If later portions of the @@ -353,7 +364,7 @@ | | returns them as an :term:`iterator`. | +------------------+-----------------------------------------------+ -:meth:`match` and :meth:`search` return ``None`` if no match can be found. If +:meth:`~re.regex.match` and :meth:`~re.regex.search` return ``None`` if no match can be found. If they're successful, a :ref:`match object ` instance is returned, containing information about the match: where it starts and ends, the substring it matched, and more. @@ -419,8 +430,8 @@ >>> m.span() (0, 5) -:meth:`group` returns the substring that was matched by the RE. :meth:`start` -and :meth:`end` return the starting and ending index of the match. :meth:`span` +:meth:`~re.match.group` returns the substring that was matched by the RE. :meth:`~re.match.start` +and :meth:`~re.match.end` return the starting and ending index of the match. :meth:`~re.match.span` returns both start and end indexes in a single tuple. Since the :meth:`match` method only checks if the RE matches at the start of a string, :meth:`start` will always be zero. However, the :meth:`search` method of patterns @@ -448,14 +459,14 @@ print('No match') Two pattern methods return all of the matches for a pattern. -:meth:`findall` returns a list of matching strings:: +:meth:`~re.regex.findall` returns a list of matching strings:: >>> p = re.compile('\d+') >>> p.findall('12 drummers drumming, 11 pipers piping, 10 lords a-leaping') ['12', '11', '10'] :meth:`findall` has to create the entire list before it can be returned as the -result. The :meth:`finditer` method returns a sequence of +result. The :meth:`~re.regex.finditer` method returns a sequence of :ref:`match object ` instances as an :term:`iterator`:: >>> iterator = p.finditer('12 drummers drumming, 11 ... 10 ...') @@ -473,9 +484,9 @@ ---------------------- You don't have to create a pattern object and call its methods; the -:mod:`re` module also provides top-level functions called :func:`match`, -:func:`search`, :func:`findall`, :func:`sub`, and so forth. These functions -take the same arguments as the corresponding pattern method, with +:mod:`re` module also provides top-level functions called :func:`~re.match`, +:func:`~re.search`, :func:`~re.findall`, :func:`~re.sub`, and so forth. These functions +take the same arguments as the corresponding pattern method with the RE string added as the first argument, and still return either ``None`` or a :ref:`match object ` instance. :: @@ -485,26 +496,15 @@ <_sre.SRE_Match object at 0x...> Under the hood, these functions simply create a pattern object for you -and call the appropriate method on it. They also store the compiled object in a -cache, so future calls using the same RE are faster. +and call the appropriate method on it. They also store the compiled +object in a cache, so future calls using the same RE won't need to +parse the pattern again and again. Should you use these module-level functions, or should you get the -pattern and call its methods yourself? That choice depends on how -frequently the RE will be used, and on your personal coding style. If the RE is -being used at only one point in the code, then the module functions are probably -more convenient. 
If a program contains a lot of regular expressions, or re-uses -the same ones in several locations, then it might be worthwhile to collect all -the definitions in one place, in a section of code that compiles all the REs -ahead of time. To take an example from the standard library, here's an extract -from the now-defunct Python 2 standard :mod:`xmllib` module:: - - ref = re.compile( ... ) - entityref = re.compile( ... ) - charref = re.compile( ... ) - starttagopen = re.compile( ... ) - -I generally prefer to work with the compiled object, even for one-time uses, but -few people will be as much of a purist about this as I am. +pattern and call its methods yourself? If you're accessing a regex +within a loop, pre-compiling it will save a few function calls. +Outside of loops, there's not much difference thanks to the internal +cache. Compilation Flags @@ -524,6 +524,10 @@ +---------------------------------+--------------------------------------------+ | Flag | Meaning | +=================================+============================================+ +| :const:`ASCII`, :const:`A` | Makes several escapes like ``\w``, ``\b``, | +| | ``\s`` and ``\d`` match only on ASCII | +| | characters with the respective property. | ++---------------------------------+--------------------------------------------+ | :const:`DOTALL`, :const:`S` | Make ``.`` match any character, including | | | newlines | +---------------------------------+--------------------------------------------+ @@ -535,11 +539,7 @@ | | ``$`` | +---------------------------------+--------------------------------------------+ | :const:`VERBOSE`, :const:`X` | Enable verbose REs, which can be organized | -| | more cleanly and understandably. | -+---------------------------------+--------------------------------------------+ -| :const:`ASCII`, :const:`A` | Makes several escapes like ``\w``, ``\b``, | -| | ``\s`` and ``\d`` match only on ASCII | -| | characters with the respective property. | +| (for 'extended') | more cleanly and understandably. | +---------------------------------+--------------------------------------------+ @@ -558,7 +558,8 @@ LOCALE :noindex: - Make ``\w``, ``\W``, ``\b``, and ``\B``, dependent on the current locale. + Make ``\w``, ``\W``, ``\b``, and ``\B``, dependent on the current locale + instead of the Unicode database. Locales are a feature of the C library intended to help in writing programs that take account of language differences. For example, if you're processing French @@ -851,11 +852,10 @@ problem. Both of them use a common syntax for regular expression extensions, so we'll look at that first. -Perl 5 added several additional features to standard regular expressions, and -the Python :mod:`re` module supports most of them. It would have been -difficult to choose new single-keystroke metacharacters or new special sequences -beginning with ``\`` to represent the new features without making Perl's regular -expressions confusingly different from standard REs. If you chose ``&`` as a +Perl 5 is well-known for its powerful additions to standard regular expressions. +For these new features the Perl developers couldn't choose new single-keystroke metacharacters +or new special sequences beginning with ``\`` without making Perl's regular +expressions confusingly different from standard REs. If they chose ``&`` as a new metacharacter, for example, old expressions would be assuming that ``&`` was a regular character and wouldn't have escaped it by writing ``\&`` or ``[&]``. 
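To make the pre-compilation advice described a little earlier in this diff concrete (a sketch by way of illustration, not taken from the patch): the module-level functions look the pattern up in the internal cache on every call, whereas a pattern compiled once outside the loop skips that lookup entirely.

    import re

    PHONE = re.compile(r'\d{3}-\d{4}')      # compiled once, reused below

    def find_numbers(lines):
        # Equivalent to calling re.findall(r'\d{3}-\d{4}', line) each time,
        # but avoids the repeated cache lookup inside the loop.
        return [m for line in lines for m in PHONE.findall(line)]

    print(find_numbers(['call 555-1234', 'fax 555-9876']))  # ['555-1234', '555-9876']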
@@ -867,22 +867,15 @@ assertion) and ``(?:foo)`` is something else (a non-capturing group containing the subexpression ``foo``). -Python adds an extension syntax to Perl's extension syntax. If the first -character after the question mark is a ``P``, you know that it's an extension -that's specific to Python. Currently there are two such extensions: -``(?P...)`` defines a named group, and ``(?P=name)`` is a backreference to -a named group. If future versions of Perl 5 add similar features using a -different syntax, the :mod:`re` module will be changed to support the new -syntax, while preserving the Python-specific syntax for compatibility's sake. +Python supports several of Perl's extensions and adds an extension +syntax to Perl's extension syntax. If the first character after the +question mark is a ``P``, you know that it's an extension that's +specific to Python. -Now that we've looked at the general extension syntax, we can return to the -features that simplify working with groups in complex REs. Since groups are -numbered from left to right and a complex expression may use many groups, it can -become difficult to keep track of the correct numbering. Modifying such a -complex RE is annoying, too: insert a new group near the beginning and you -change the numbers of everything that follows it. +Now that we've looked at the general extension syntax, we can return +to the features that simplify working with groups in complex REs. -Sometimes you'll want to use a group to collect a part of a regular expression, +Sometimes you'll want to use a group to denote a part of a regular expression, but aren't interested in retrieving the group's contents. You can make this fact explicit by using a non-capturing group: ``(?:...)``, where you can replace the ``...`` with any other regular expression. :: @@ -908,7 +901,7 @@ The syntax for a named group is one of the Python-specific extensions: ``(?P...)``. *name* is, obviously, the name of the group. Named groups -also behave exactly like capturing groups, and additionally associate a name +behave exactly like capturing groups, and additionally associate a name with a group. The :ref:`match object ` methods that deal with capturing groups all accept either integers that refer to the group by number or strings that contain the desired group's name. Named groups are still @@ -975,9 +968,10 @@ ``.*[.].*$`` Notice that the ``.`` needs to be treated specially because it's a -metacharacter; I've put it inside a character class. Also notice the trailing -``$``; this is added to ensure that all the rest of the string must be included -in the extension. This regular expression matches ``foo.bar`` and +metacharacter, so it's inside a character class to only match that +specific character. Also notice the trailing ``$``; this is added to +ensure that all the rest of the string must be included in the +extension. This regular expression matches ``foo.bar`` and ``autoexec.bat`` and ``sendmail.cf`` and ``printers.conf``. Now, consider complicating the problem a bit; what if you want to match @@ -1051,7 +1045,7 @@ The :meth:`split` method of a pattern splits a string apart wherever the RE matches, returning a list of the pieces. It's similar to the :meth:`split` method of strings but provides much more generality in the -delimiters that you can split by; :meth:`split` only supports splitting by +delimiters that you can split by; string :meth:`split` only supports splitting by whitespace or by a fixed string. 
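As a minimal, made-up comparison of the two ``split`` flavours (the sample text and the delimiter class ``[\s,;]+`` are chosen only for illustration), a compiled pattern can split on an arbitrary RE, whereas ``str.split`` is tied to a single fixed separator. ::

   >>> import re
   >>> p = re.compile(r'[\s,;]+')
   >>> p.split('red, green;blue  yellow')
   ['red', 'green', 'blue', 'yellow']
   >>> 'red, green;blue  yellow'.split(' ')
   ['red,', 'green;blue', '', 'yellow']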
As you'd expect, there's a module-level :func:`re.split` function, too. @@ -1106,7 +1100,6 @@ with a different string. The :meth:`sub` method takes a replacement value, which can be either a string or a function, and the string to be processed. - .. method:: .sub(replacement, string[, count=0]) :noindex: @@ -1362,4 +1355,3 @@ reference for programming in Python. (The first edition covered Python's now-removed :mod:`regex` module, which won't help you much.) Consider checking it out from your library. - -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 01:56:16 2013 From: python-checkins at python.org (andrew.kuchling) Date: Mon, 19 Aug 2013 01:56:16 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_=2318445=3A_change_permiss?= =?utf-8?q?ions_on_some_scripts_to_executable?= Message-ID: <3cJFXc4ThFz7LjR@mail.python.org> http://hg.python.org/cpython/rev/9d5468a7a381 changeset: 85254:9d5468a7a381 user: Andrew Kuchling date: Sun Aug 18 19:48:12 2013 -0400 summary: #18445: change permissions on some scripts to executable files: Tools/scripts/analyze_dxp.py | 0 Tools/scripts/parse_html5_entities.py | 0 Tools/scripts/pydocgui.pyw | 0 Tools/scripts/win_add2path.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) diff --git a/Tools/scripts/analyze_dxp.py b/Tools/scripts/analyze_dxp.py old mode 100644 new mode 100755 diff --git a/Tools/scripts/parse_html5_entities.py b/Tools/scripts/parse_html5_entities.py old mode 100644 new mode 100755 diff --git a/Tools/scripts/pydocgui.pyw b/Tools/scripts/pydocgui.pyw old mode 100644 new mode 100755 diff --git a/Tools/scripts/win_add2path.py b/Tools/scripts/win_add2path.py old mode 100644 new mode 100755 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 02:07:20 2013 From: python-checkins at python.org (andrew.kuchling) Date: Mon, 19 Aug 2013 02:07:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_=2318445=3A_update_Tools/s?= =?utf-8?q?cripts/README=2E?= Message-ID: <3cJFnN0FWXz7Ljb@mail.python.org> http://hg.python.org/cpython/rev/16fad6c48016 changeset: 85255:16fad6c48016 user: Andrew Kuchling date: Sun Aug 18 20:04:34 2013 -0400 summary: #18445: update Tools/scripts/README. - document abitype.py, analyze_dxp.py, get-remote-certificate.py, import_diagnostics.py, parse_html5_entities.py. - remove redemo.py which was moved to Tools/Demo. - sort list into alphabetical order Patch by Seydou Dia. files: Tools/scripts/README | 126 +++++++++++++++--------------- 1 files changed, 65 insertions(+), 61 deletions(-) diff --git a/Tools/scripts/README b/Tools/scripts/README --- a/Tools/scripts/README +++ b/Tools/scripts/README @@ -2,64 +2,68 @@ useful while building, extending or managing Python. Some (e.g., dutree or lll) are also generally useful UNIX tools. 
-2to3 Main script for running the 2to3 conversion tool -analyze_dxp.py Analyzes the result of sys.getdxp() -byext.py Print lines/words/chars stats of files by extension -byteyears.py Print product of a file's size and age -checkpyc.py Check presence and validity of ".pyc" files -cleanfuture.py Fix redundant Python __future__ statements -combinerefs.py A helper for analyzing PYTHONDUMPREFS output -copytime.py Copy one file's atime and mtime to another -crlf.py Change CRLF line endings to LF (Windows to Unix) -db2pickle.py Dump a database file to a pickle -diff.py Print file diffs in context, unified, or ndiff formats -dutree.py Format du(1) output as a tree sorted by size -eptags.py Create Emacs TAGS file for Python modules -find_recursionlimit.py Find the maximum recursion limit on this machine -finddiv.py A grep-like tool that looks for division operators -findlinksto.py Recursively find symbolic links to a given path prefix -findnocoding.py Find source files which need an encoding declaration -fixcid.py Massive identifier substitution on C source files -fixdiv.py Tool to fix division operators. -fixheader.py Add some cpp magic to a C include file -fixnotice.py Fix the copyright notice in source files -fixps.py Fix Python scripts' first line (if #!) -ftpmirror.py FTP mirror script -google.py Open a webbrowser with Google -gprof2html.py Transform gprof(1) output into useful HTML -h2py.py Translate #define's into Python assignments -highlight.py Python syntax highlighting with HTML output -idle3 Main program to start IDLE -ifdef.py Remove #if(n)def groups from C sources -lfcr.py Change LF line endings to CRLF (Unix to Windows) -linktree.py Make a copy of a tree with links to original files -lll.py Find and list symbolic links in current directory -mailerdaemon.py Parse error messages from mailer daemons (Sjoerd&Jack) -make_ctype.py Generate ctype.h replacement in stringobject.c -md5sum.py Print MD5 checksums of argument files -mkreal.py Turn a symbolic link into a real file or directory -ndiff.py Intelligent diff between text files (Tim Peters) -nm2def.py Create a template for PC/python_nt.def (Marc Lemburg) -objgraph.py Print object graph from nm output on a library -parseentities.py Utility for parsing HTML entity definitions -patchcheck.py Perform common checks and cleanup before committing -pathfix.py Change #!/usr/local/bin/python into something else -pdeps.py Print dependencies between Python modules -pickle2db.py Load a pickle generated by db2pickle.py to a database -pindent.py Indent Python code, giving block-closing comments -ptags.py Create vi tags file for Python modules -pydoc3 Python documentation browser -pysource.py Find Python source files -redemo.py Basic regular expression demonstration facility -reindent.py Change .py files to use 4-space indents -reindent-rst.py Fix-up reStructuredText file whitespace -rgrep.py Reverse grep through a file (useful for big logfiles) -run_tests.py Run the test suite with more sensible default options -serve.py Small wsgiref-based web server, used in make serve in Doc -suff.py Sort a list of files by suffix -svneol.py Set svn:eol-style on all files in directory -texi2html.py Convert GNU texinfo files into HTML -treesync.py Synchronize source trees (very idiosyncratic) -untabify.py Replace tabs with spaces in argument files -win_add2path.py Add Python to the search path on Windows -which.py Find a program in $PATH +2to3 Main script for running the 2to3 conversion tool +abitype.py Converts a C file to use the PEP 384 type definition API 
+analyze_dxp.py Analyzes the result of sys.getdxp() +byext.py Print lines/words/chars stats of files by extension +byteyears.py Print product of a file's size and age +checkpyc.py Check presence and validity of ".pyc" files +cleanfuture.py Fix redundant Python __future__ statements +combinerefs.py A helper for analyzing PYTHONDUMPREFS output +copytime.py Copy one file's atime and mtime to another +crlf.py Change CRLF line endings to LF (Windows to Unix) +db2pickle.py Dump a database file to a pickle +diff.py Print file diffs in context, unified, or ndiff formats +dutree.py Format du(1) output as a tree sorted by size +eptags.py Create Emacs TAGS file for Python modules +finddiv.py A grep-like tool that looks for division operators +findlinksto.py Recursively find symbolic links to a given path prefix +findnocoding.py Find source files which need an encoding declaration +find_recursionlimit.py Find the maximum recursion limit on this machine +find-uname.py Look for the given arguments in the sets of all Unicode names +fixcid.py Massive identifier substitution on C source files +fixdiv.py Tool to fix division operators. +fixheader.py Add some cpp magic to a C include file +fixnotice.py Fix the copyright notice in source files +fixps.py Fix Python scripts' first line (if #!) +ftpmirror.py FTP mirror script +get-remote-certificate.py Fetch the certificate that the server(s) are providing in PEM form +google.py Open a webbrowser with Google +gprof2html.py Transform gprof(1) output into useful HTML +h2py.py Translate #define's into Python assignments +highlight.py Python syntax highlighting with HTML output +idle3 Main program to start IDLE +ifdef.py Remove #if(n)def groups from C sources +import_diagnostics.py Miscellaneous diagnostics for the import system +lfcr.py Change LF line endings to CRLF (Unix to Windows) +linktree.py Make a copy of a tree with links to original files +lll.py Find and list symbolic links in current directory +mailerdaemon.py Parse error messages from mailer daemons (Sjoerd&Jack) +make_ctype.py Generate ctype.h replacement in stringobject.c +md5sum.py Print MD5 checksums of argument files +mkreal.py Turn a symbolic link into a real file or directory +ndiff.py Intelligent diff between text files (Tim Peters) +nm2def.py Create a template for PC/python_nt.def (Marc Lemburg) +objgraph.py Print object graph from nm output on a library +parseentities.py Utility for parsing HTML entity definitions +parse_html5_entities.py Utility for parsing HTML5 entity definitions +patchcheck.py Perform common checks and cleanup before committing +pathfix.py Change #!/usr/local/bin/python into something else +pdeps.py Print dependencies between Python modules +pickle2db.py Load a pickle generated by db2pickle.py to a database +pindent.py Indent Python code, giving block-closing comments +ptags.py Create vi tags file for Python modules +pydoc3 Python documentation browser +pysource.py Find Python source files +reindent.py Change .py files to use 4-space indents +reindent-rst.py Fix-up reStructuredText file whitespace +rgrep.py Reverse grep through a file (useful for big logfiles) +run_tests.py Run the test suite with more sensible default options +serve.py Small wsgiref-based web server, used in make serve in Doc +suff.py Sort a list of files by suffix +svneol.py Set svn:eol-style on all files in directory +texi2html.py Convert GNU texinfo files into HTML +treesync.py Synchronize source trees (very idiosyncratic) +untabify.py Replace tabs with spaces in argument files +which.py Find a program 
in $PATH +win_add2path.py Add Python to the search path on Windows -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Mon Aug 19 06:10:38 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 19 Aug 2013 06:10:38 +0200 Subject: [Python-checkins] Daily reference leaks (16fad6c48016): sum=0 Message-ID: results for 16fad6c48016 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogYsrV4i', '-x'] From python-checkins at python.org Mon Aug 19 07:06:01 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 07:06:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NDg5?= =?utf-8?q?=3A_idlelib=2ESearchEngine_-_add_docstrings?= Message-ID: <3cJNQ14q6Mz7LjR@mail.python.org> http://hg.python.org/cpython/rev/310d187020e3 changeset: 85256:310d187020e3 branch: 2.7 parent: 85251:1f2b78941c4a user: Terry Jan Reedy date: Mon Aug 19 01:05:09 2013 -0400 summary: Issue #18489: idlelib.SearchEngine - add docstrings (original patch by Phil Webster). files: Lib/idlelib/SearchEngine.py | 62 ++++++++++++++---------- 1 files changed, 37 insertions(+), 25 deletions(-) diff --git a/Lib/idlelib/SearchEngine.py b/Lib/idlelib/SearchEngine.py --- a/Lib/idlelib/SearchEngine.py +++ b/Lib/idlelib/SearchEngine.py @@ -1,19 +1,28 @@ +'''Define SearchEngine for search dialogs.''' import re from Tkinter import * import tkMessageBox def get(root): + '''Return the singleton SearchEngine instance for the process. + + The single SearchEngine saves settings between dialog instances. + If there is not a SearchEngine already, make one. + ''' if not hasattr(root, "_searchengine"): root._searchengine = SearchEngine(root) - # XXX This will never garbage-collect -- who cares + # This creates a cycle that persists until root is deleted. return root._searchengine class SearchEngine: + """Handles searching a text widget for Find, Replace, and Grep.""" def __init__(self, root): + '''Initialize Variables that save search state. + + The dialogs bind these to the UI elements present in the dialogs. + ''' self.root = root - # State shared by search, replace, and grep; - # the search dialogs bind these to UI elements. self.patvar = StringVar(root) # search pattern self.revar = BooleanVar(root) # regular expression? self.casevar = BooleanVar(root) # match case? @@ -56,6 +65,7 @@ return pat def getprog(self): + "Return compiled cooked search pattern." pat = self.getpat() if not pat: self.report_error(pat, "Empty regular expression") @@ -77,7 +87,7 @@ return prog def report_error(self, pat, msg, col=-1): - # Derived class could overrid this with something fancier + # Derived class could override this with something fancier msg = "Error: " + str(msg) if pat: msg = msg + "\np\Pattern: " + str(pat) @@ -92,25 +102,23 @@ self.setpat(pat) def search_text(self, text, prog=None, ok=0): - """Search a text widget for the pattern. + '''Return (lineno, matchobj) for prog in text widget, or None. - If prog is given, it should be the precompiled pattern. - Return a tuple (lineno, matchobj); None if not found. + If prog is given, it should be a precompiled pattern. + Wrap (yes/no) and direction (forward/back) settings are used. - This obeys the wrap and direction (back) settings. - - The search starts at the selection (if there is one) or - at the insert mark (otherwise). 
If the search is forward, - it starts at the right of the selection; for a backward - search, it starts at the left end. An empty match exactly - at either end of the selection (or at the insert mark if - there is no selection) is ignored unless the ok flag is true - -- this is done to guarantee progress. + The search starts at the selection (if there is one) or at the + insert mark (otherwise). If the search is forward, it starts + at the right of the selection; for a backward search, it + starts at the left end. An empty match exactly at either end + of the selection (or at the insert mark if there is no + selection) is ignored unless the ok flag is true -- this is + done to guarantee progress. If the search is allowed to wrap around, it will return the original selection if (and only if) it is the only match. + ''' - """ if not prog: prog = self.getprog() if not prog: @@ -179,10 +187,11 @@ col = len(chars) - 1 return None -# Helper to search backwards in a string. -# (Optimized for the case where the pattern isn't found.) +def search_reverse(prog, chars, col): + '''Search backwards in a string (line of text). -def search_reverse(prog, chars, col): + This is done by searching forwards until there is no match. + ''' m = prog.search(chars) if not m: return None @@ -198,10 +207,9 @@ i, j = m.span() return found -# Helper to get selection end points, defaulting to insert mark. -# Return a tuple of indices ("line.col" strings). - def get_selection(text): + '''Return tuple of 'line.col' indexes from selection or insert mark. + ''' try: first = text.index("sel.first") last = text.index("sel.last") @@ -213,8 +221,12 @@ last = first return first, last -# Helper to parse a text index into a (line, col) tuple. - def get_line_col(index): + '''Return (line, col) tuple of ints from 'line.col' string.''' line, col = map(int, index.split(".")) # Fails on invalid index return line, col + +##if __name__ == "__main__": +## from test import support; support.use_resources = ['gui'] +## import unittest +## unittest.main('idlelib.idle_test.test_searchengine', verbosity=2, exit=False) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 07:06:03 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 07:06:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NDg5?= =?utf-8?q?=3A_idlelib=2ESearchEngine_-_add_docstrings?= Message-ID: <3cJNQ30jsfz7Ljs@mail.python.org> http://hg.python.org/cpython/rev/cfb510884a13 changeset: 85257:cfb510884a13 branch: 3.3 parent: 85252:366ca21600c9 user: Terry Jan Reedy date: Mon Aug 19 01:05:19 2013 -0400 summary: Issue #18489: idlelib.SearchEngine - add docstrings (original patch by Phil Webster). files: Lib/idlelib/SearchEngine.py | 62 ++++++++++++++---------- 1 files changed, 37 insertions(+), 25 deletions(-) diff --git a/Lib/idlelib/SearchEngine.py b/Lib/idlelib/SearchEngine.py --- a/Lib/idlelib/SearchEngine.py +++ b/Lib/idlelib/SearchEngine.py @@ -1,19 +1,28 @@ +'''Define SearchEngine for search dialogs.''' import re from tkinter import * import tkinter.messagebox as tkMessageBox def get(root): + '''Return the singleton SearchEngine instance for the process. + + The single SearchEngine saves settings between dialog instances. + If there is not a SearchEngine already, make one. + ''' if not hasattr(root, "_searchengine"): root._searchengine = SearchEngine(root) - # XXX This will never garbage-collect -- who cares + # This creates a cycle that persists until root is deleted. 
return root._searchengine class SearchEngine: + """Handles searching a text widget for Find, Replace, and Grep.""" def __init__(self, root): + '''Initialize Variables that save search state. + + The dialogs bind these to the UI elements present in the dialogs. + ''' self.root = root - # State shared by search, replace, and grep; - # the search dialogs bind these to UI elements. self.patvar = StringVar(root) # search pattern self.revar = BooleanVar(root) # regular expression? self.casevar = BooleanVar(root) # match case? @@ -56,6 +65,7 @@ return pat def getprog(self): + "Return compiled cooked search pattern." pat = self.getpat() if not pat: self.report_error(pat, "Empty regular expression") @@ -77,7 +87,7 @@ return prog def report_error(self, pat, msg, col=-1): - # Derived class could overrid this with something fancier + # Derived class could override this with something fancier msg = "Error: " + str(msg) if pat: msg = msg + "\np\Pattern: " + str(pat) @@ -92,25 +102,23 @@ self.setpat(pat) def search_text(self, text, prog=None, ok=0): - """Search a text widget for the pattern. + '''Return (lineno, matchobj) for prog in text widget, or None. - If prog is given, it should be the precompiled pattern. - Return a tuple (lineno, matchobj); None if not found. + If prog is given, it should be a precompiled pattern. + Wrap (yes/no) and direction (forward/back) settings are used. - This obeys the wrap and direction (back) settings. - - The search starts at the selection (if there is one) or - at the insert mark (otherwise). If the search is forward, - it starts at the right of the selection; for a backward - search, it starts at the left end. An empty match exactly - at either end of the selection (or at the insert mark if - there is no selection) is ignored unless the ok flag is true - -- this is done to guarantee progress. + The search starts at the selection (if there is one) or at the + insert mark (otherwise). If the search is forward, it starts + at the right of the selection; for a backward search, it + starts at the left end. An empty match exactly at either end + of the selection (or at the insert mark if there is no + selection) is ignored unless the ok flag is true -- this is + done to guarantee progress. If the search is allowed to wrap around, it will return the original selection if (and only if) it is the only match. + ''' - """ if not prog: prog = self.getprog() if not prog: @@ -179,10 +187,11 @@ col = len(chars) - 1 return None -# Helper to search backwards in a string. -# (Optimized for the case where the pattern isn't found.) +def search_reverse(prog, chars, col): + '''Search backwards in a string (line of text). -def search_reverse(prog, chars, col): + This is done by searching forwards until there is no match. + ''' m = prog.search(chars) if not m: return None @@ -198,10 +207,9 @@ i, j = m.span() return found -# Helper to get selection end points, defaulting to insert mark. -# Return a tuple of indices ("line.col" strings). - def get_selection(text): + '''Return tuple of 'line.col' indexes from selection or insert mark. + ''' try: first = text.index("sel.first") last = text.index("sel.last") @@ -213,8 +221,12 @@ last = first return first, last -# Helper to parse a text index into a (line, col) tuple. 
- def get_line_col(index): + '''Return (line, col) tuple of ints from 'line.col' string.''' line, col = map(int, index.split(".")) # Fails on invalid index return line, col + +##if __name__ == "__main__": +## from test import support; support.use_resources = ['gui'] +## import unittest +## unittest.main('idlelib.idle_test.test_searchengine', verbosity=2, exit=False) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 07:06:04 2013 From: python-checkins at python.org (terry.reedy) Date: Mon, 19 Aug 2013 07:06:04 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_with_3=2E3?= Message-ID: <3cJNQ43jXfz7Ljr@mail.python.org> http://hg.python.org/cpython/rev/97e38b957041 changeset: 85258:97e38b957041 parent: 85255:16fad6c48016 parent: 85257:cfb510884a13 user: Terry Jan Reedy date: Mon Aug 19 01:05:41 2013 -0400 summary: Merge with 3.3 files: Lib/idlelib/SearchEngine.py | 62 ++++++++++++++---------- 1 files changed, 37 insertions(+), 25 deletions(-) diff --git a/Lib/idlelib/SearchEngine.py b/Lib/idlelib/SearchEngine.py --- a/Lib/idlelib/SearchEngine.py +++ b/Lib/idlelib/SearchEngine.py @@ -1,19 +1,28 @@ +'''Define SearchEngine for search dialogs.''' import re from tkinter import * import tkinter.messagebox as tkMessageBox def get(root): + '''Return the singleton SearchEngine instance for the process. + + The single SearchEngine saves settings between dialog instances. + If there is not a SearchEngine already, make one. + ''' if not hasattr(root, "_searchengine"): root._searchengine = SearchEngine(root) - # XXX This will never garbage-collect -- who cares + # This creates a cycle that persists until root is deleted. return root._searchengine class SearchEngine: + """Handles searching a text widget for Find, Replace, and Grep.""" def __init__(self, root): + '''Initialize Variables that save search state. + + The dialogs bind these to the UI elements present in the dialogs. + ''' self.root = root - # State shared by search, replace, and grep; - # the search dialogs bind these to UI elements. self.patvar = StringVar(root) # search pattern self.revar = BooleanVar(root) # regular expression? self.casevar = BooleanVar(root) # match case? @@ -56,6 +65,7 @@ return pat def getprog(self): + "Return compiled cooked search pattern." pat = self.getpat() if not pat: self.report_error(pat, "Empty regular expression") @@ -77,7 +87,7 @@ return prog def report_error(self, pat, msg, col=-1): - # Derived class could overrid this with something fancier + # Derived class could override this with something fancier msg = "Error: " + str(msg) if pat: msg = msg + "\np\Pattern: " + str(pat) @@ -92,25 +102,23 @@ self.setpat(pat) def search_text(self, text, prog=None, ok=0): - """Search a text widget for the pattern. + '''Return (lineno, matchobj) for prog in text widget, or None. - If prog is given, it should be the precompiled pattern. - Return a tuple (lineno, matchobj); None if not found. + If prog is given, it should be a precompiled pattern. + Wrap (yes/no) and direction (forward/back) settings are used. - This obeys the wrap and direction (back) settings. - - The search starts at the selection (if there is one) or - at the insert mark (otherwise). If the search is forward, - it starts at the right of the selection; for a backward - search, it starts at the left end. 
An empty match exactly - at either end of the selection (or at the insert mark if - there is no selection) is ignored unless the ok flag is true - -- this is done to guarantee progress. + The search starts at the selection (if there is one) or at the + insert mark (otherwise). If the search is forward, it starts + at the right of the selection; for a backward search, it + starts at the left end. An empty match exactly at either end + of the selection (or at the insert mark if there is no + selection) is ignored unless the ok flag is true -- this is + done to guarantee progress. If the search is allowed to wrap around, it will return the original selection if (and only if) it is the only match. + ''' - """ if not prog: prog = self.getprog() if not prog: @@ -179,10 +187,11 @@ col = len(chars) - 1 return None -# Helper to search backwards in a string. -# (Optimized for the case where the pattern isn't found.) +def search_reverse(prog, chars, col): + '''Search backwards in a string (line of text). -def search_reverse(prog, chars, col): + This is done by searching forwards until there is no match. + ''' m = prog.search(chars) if not m: return None @@ -198,10 +207,9 @@ i, j = m.span() return found -# Helper to get selection end points, defaulting to insert mark. -# Return a tuple of indices ("line.col" strings). - def get_selection(text): + '''Return tuple of 'line.col' indexes from selection or insert mark. + ''' try: first = text.index("sel.first") last = text.index("sel.last") @@ -213,8 +221,12 @@ last = first return first, last -# Helper to parse a text index into a (line, col) tuple. - def get_line_col(index): + '''Return (line, col) tuple of ints from 'line.col' string.''' line, col = map(int, index.split(".")) # Fails on invalid index return line, col + +##if __name__ == "__main__": +## from test import support; support.use_resources = ['gui'] +## import unittest +## unittest.main('idlelib.idle_test.test_searchengine', verbosity=2, exit=False) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 09:05:10 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 09:05:10 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzYx?= =?utf-8?q?=3A_Improved_cross-references_in_email_documentation=2E?= Message-ID: <3cJR3V0FLrz7Ljt@mail.python.org> http://hg.python.org/cpython/rev/7a1534dba050 changeset: 85259:7a1534dba050 branch: 3.3 parent: 85257:cfb510884a13 user: Serhiy Storchaka date: Mon Aug 19 09:59:18 2013 +0300 summary: Issue #18761: Improved cross-references in email documentation. files: Doc/library/email.charset.rst | 2 +- Doc/library/email.errors.rst | 29 +- Doc/library/email.headerregistry.rst | 15 +- Doc/library/email.iterators.rst | 11 +- Doc/library/email.message.rst | 11 +- Doc/library/email.mime.rst | 29 +- Doc/library/email.parser.rst | 56 ++- Doc/library/email.policy.rst | 9 +- Doc/library/email.rst | 183 ++++++++------ Doc/library/email.util.rst | 9 +- 10 files changed, 203 insertions(+), 151 deletions(-) diff --git a/Doc/library/email.charset.rst b/Doc/library/email.charset.rst --- a/Doc/library/email.charset.rst +++ b/Doc/library/email.charset.rst @@ -234,5 +234,5 @@ *charset* is the canonical name of a character set. 
*codecname* is the name of a Python codec, as appropriate for the second argument to the :class:`str`'s - :func:`decode` method + :meth:`~str.encode` method diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst --- a/Doc/library/email.errors.rst +++ b/Doc/library/email.errors.rst @@ -25,7 +25,8 @@ Raised under some error conditions when parsing the :rfc:`2822` headers of a message, this class is derived from :exc:`MessageParseError`. It can be raised - from the :meth:`Parser.parse` or :meth:`Parser.parsestr` methods. + from the :meth:`Parser.parse ` or + :meth:`Parser.parsestr ` methods. Situations where it can be raised include finding an envelope header after the first :rfc:`2822` header of the message, finding a continuation line before the @@ -37,7 +38,8 @@ Raised under some error conditions when parsing the :rfc:`2822` headers of a message, this class is derived from :exc:`MessageParseError`. It can be raised - from the :meth:`Parser.parse` or :meth:`Parser.parsestr` methods. + from the :meth:`Parser.parse ` or + :meth:`Parser.parsestr ` methods. Situations where it can be raised include not being able to find the starting or terminating boundary in a :mimetype:`multipart/\*` message when strict parsing @@ -46,19 +48,20 @@ .. exception:: MultipartConversionError() - Raised when a payload is added to a :class:`Message` object using - :meth:`add_payload`, but the payload is already a scalar and the message's - :mailheader:`Content-Type` main type is not either :mimetype:`multipart` or - missing. :exc:`MultipartConversionError` multiply inherits from - :exc:`MessageError` and the built-in :exc:`TypeError`. + Raised when a payload is added to a :class:`~email.message.Message` object + using :meth:`add_payload`, but the payload is already a scalar and the + message's :mailheader:`Content-Type` main type is not either + :mimetype:`multipart` or missing. :exc:`MultipartConversionError` multiply + inherits from :exc:`MessageError` and the built-in :exc:`TypeError`. - Since :meth:`Message.add_payload` is deprecated, this exception is rarely raised - in practice. However the exception may also be raised if the :meth:`attach` + Since :meth:`Message.add_payload` is deprecated, this exception is rarely + raised in practice. However the exception may also be raised if the + :meth:`~email.message.Message.attach` method is called on an instance of a class derived from :class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g. :class:`~email.mime.image.MIMEImage`). -Here's the list of the defects that the :class:`~email.mime.parser.FeedParser` +Here's the list of the defects that the :class:`~email.parser.FeedParser` can find while parsing messages. Note that the defects are added to the message where the problem was found, so for example, if a message nested inside a :mimetype:`multipart/alternative` had a malformed header, that nested message @@ -97,9 +100,9 @@ This defect has not been used for several Python versions. * :class:`MultipartInvariantViolationDefect` -- A message claimed to be a - :mimetype:`multipart`, but no subparts were found. Note that when a message has - this defect, its :meth:`is_multipart` method may return false even though its - content type claims to be :mimetype:`multipart`. + :mimetype:`multipart`, but no subparts were found. Note that when a message + has this defect, its :meth:`~email.message.Message.is_multipart` method may + return false even though its content type claims to be :mimetype:`multipart`. 
* :class:`InvalidBase64PaddingDefect` -- When decoding a block of base64 enocded bytes, the padding was not correct. Enough padding is added to diff --git a/Doc/library/email.headerregistry.rst b/Doc/library/email.headerregistry.rst --- a/Doc/library/email.headerregistry.rst +++ b/Doc/library/email.headerregistry.rst @@ -56,15 +56,16 @@ .. attribute:: name The name of the header (the portion of the field before the ':'). This - is exactly the value passed in the :attr:`~EmailPolicy.header_factory` - call for *name*; that is, case is preserved. + is exactly the value passed in the + :attr:`~email.policy.EmailPolicy.header_factory` call for *name*; that + is, case is preserved. .. attribute:: defects A tuple of :exc:`~email.errors.HeaderDefect` instances reporting any RFC compliance problems found during parsing. The email package tries to - be complete about detecting compliance issues. See the :mod:`errors` + be complete about detecting compliance issues. See the :mod:`~email.errors` module for a discussion of the types of defects that may be reported. @@ -230,8 +231,8 @@ The single address encoded by the header value. If the header value actually contains more than one address (which would be a violation of - the RFC under the default :mod:`policy`), accessing this attribute will - result in a :exc:`ValueError`. + the RFC under the default :mod:`~email.policy`), accessing this attribute + will result in a :exc:`ValueError`. Many of the above classes also have a ``Unique`` variant (for example, @@ -275,7 +276,7 @@ .. class:: ContentTypeHeader - A :class:`ParameterizedMIMEHheader` class that handles the + A :class:`ParameterizedMIMEHeader` class that handles the :mailheader:`Content-Type` header. .. attribute:: content_type @@ -289,7 +290,7 @@ .. class:: ContentDispositionHeader - A :class:`ParameterizedMIMEHheader` class that handles the + A :class:`ParameterizedMIMEHeader` class that handles the :mailheader:`Content-Disposition` header. .. attribute:: content-disposition diff --git a/Doc/library/email.iterators.rst b/Doc/library/email.iterators.rst --- a/Doc/library/email.iterators.rst +++ b/Doc/library/email.iterators.rst @@ -6,8 +6,9 @@ Iterating over a message object tree is fairly easy with the -:meth:`Message.walk` method. The :mod:`email.iterators` module provides some -useful higher level iterations over message object trees. +:meth:`Message.walk ` method. The +:mod:`email.iterators` module provides some useful higher level iterations over +message object trees. .. function:: body_line_iterator(msg, decode=False) @@ -16,9 +17,11 @@ string payloads line-by-line. It skips over all the subpart headers, and it skips over any subpart with a payload that isn't a Python string. This is somewhat equivalent to reading the flat text representation of the message from - a file using :meth:`readline`, skipping over all the intervening headers. + a file using :meth:`~io.TextIOBase.readline`, skipping over all the + intervening headers. - Optional *decode* is passed through to :meth:`Message.get_payload`. + Optional *decode* is passed through to :meth:`Message.get_payload + `. .. function:: typed_subpart_iterator(msg, maintype='text', subtype=None) diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -55,8 +55,8 @@ format the message the way you want. For example, by default it does not do the mangling of lines that begin with ``From`` that is required by the unix mbox format. 
For more flexibility, instantiate a - :class:`~email.generator.Generator` instance and use its :meth:`flatten` - method directly. For example:: + :class:`~email.generator.Generator` instance and use its + :meth:`~email.generator.Generator.flatten` method directly. For example:: from io import StringIO from email.generator import Generator @@ -476,8 +476,8 @@ Set the ``boundary`` parameter of the :mailheader:`Content-Type` header to *boundary*. :meth:`set_boundary` will always quote *boundary* if - necessary. A :exc:`HeaderParseError` is raised if the message object has - no :mailheader:`Content-Type` header. + necessary. A :exc:`~email.errors.HeaderParseError` is raised if the + message object has no :mailheader:`Content-Type` header. Note that using this method is subtly different than deleting the old :mailheader:`Content-Type` header and adding a new one with the new @@ -573,7 +573,8 @@ the end of the message. You do not need to set the epilogue to the empty string in order for the - :class:`Generator` to print a newline at the end of the file. + :class:`~email.generator.Generator` to print a newline at the end of the + file. .. attribute:: defects diff --git a/Doc/library/email.mime.rst b/Doc/library/email.mime.rst --- a/Doc/library/email.mime.rst +++ b/Doc/library/email.mime.rst @@ -35,7 +35,8 @@ *_maintype* is the :mailheader:`Content-Type` major type (e.g. :mimetype:`text` or :mimetype:`image`), and *_subtype* is the :mailheader:`Content-Type` minor type (e.g. :mimetype:`plain` or :mimetype:`gif`). *_params* is a parameter - key/value dictionary and is passed directly to :meth:`Message.add_header`. + key/value dictionary and is passed directly to :meth:`Message.add_header + `. The :class:`MIMEBase` class always adds a :mailheader:`Content-Type` header (based on *_maintype*, *_subtype*, and *_params*), and a @@ -50,8 +51,9 @@ A subclass of :class:`~email.mime.base.MIMEBase`, this is an intermediate base class for MIME messages that are not :mimetype:`multipart`. The primary - purpose of this class is to prevent the use of the :meth:`attach` method, - which only makes sense for :mimetype:`multipart` messages. If :meth:`attach` + purpose of this class is to prevent the use of the + :meth:`~email.message.Message.attach` method, which only makes sense for + :mimetype:`multipart` messages. If :meth:`~email.message.Message.attach` is called, a :exc:`~email.errors.MultipartConversionError` exception is raised. @@ -74,7 +76,8 @@ *_subparts* is a sequence of initial subparts for the payload. It must be possible to convert this sequence to a list. You can always attach new subparts - to the message by using the :meth:`Message.attach` method. + to the message by using the :meth:`Message.attach + ` method. Additional parameters for the :mailheader:`Content-Type` header are taken from the keyword arguments, or passed into the *_params* argument, which is a keyword @@ -95,8 +98,10 @@ Optional *_encoder* is a callable (i.e. function) which will perform the actual encoding of the data for transport. This callable takes one argument, which is - the :class:`MIMEApplication` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + the :class:`MIMEApplication` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. 
The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. @@ -121,8 +126,10 @@ Optional *_encoder* is a callable (i.e. function) which will perform the actual encoding of the audio data for transport. This callable takes one argument, - which is the :class:`MIMEAudio` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + which is the :class:`MIMEAudio` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. @@ -147,8 +154,10 @@ Optional *_encoder* is a callable (i.e. function) which will perform the actual encoding of the image data for transport. This callable takes one argument, - which is the :class:`MIMEImage` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + which is the :class:`MIMEImage` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. diff --git a/Doc/library/email.parser.rst b/Doc/library/email.parser.rst --- a/Doc/library/email.parser.rst +++ b/Doc/library/email.parser.rst @@ -7,7 +7,8 @@ Message object structures can be created in one of two ways: they can be created from whole cloth by instantiating :class:`~email.message.Message` objects and -stringing them together via :meth:`attach` and :meth:`set_payload` calls, or they +stringing them together via :meth:`~email.message.Message.attach` and +:meth:`~email.message.Message.set_payload` calls, or they can be created by parsing a flat text representation of the email message. The :mod:`email` package provides a standard parser that understands most email @@ -16,8 +17,9 @@ :class:`~email.message.Message` instance of the object structure. For simple, non-MIME messages the payload of this root object will likely be a string containing the text of the message. For MIME messages, the root object will -return ``True`` from its :meth:`is_multipart` method, and the subparts can be -accessed via the :meth:`get_payload` and :meth:`walk` methods. +return ``True`` from its :meth:`~email.message.Message.is_multipart` method, and +the subparts can be accessed via the :meth:`~email.message.Message.get_payload` +and :meth:`~email.message.Message.walk` methods. There are actually two parser interfaces available for use, the classic :class:`Parser` API and the incremental :class:`FeedParser` API. The classic @@ -134,7 +136,8 @@ Read all the data from the file-like object *fp*, parse the resulting text, and return the root message object. *fp* must support both the - :meth:`readline` and the :meth:`read` methods on file-like objects. + :meth:`~io.TextIOBase.readline` and the :meth:`~io.TextIOBase.read` + methods on file-like objects. 
The text contained in *fp* must be formatted as a block of :rfc:`2822` style headers and header continuation lines, optionally preceded by a @@ -173,8 +176,8 @@ Read all the data from the binary file-like object *fp*, parse the resulting bytes, and return the message object. *fp* must support - both the :meth:`readline` and the :meth:`read` methods on file-like - objects. + both the :meth:`~io.IOBase.readline` and the :meth:`~io.IOBase.read` + methods on file-like objects. The bytes contained in *fp* must be formatted as a block of :rfc:`2822` style headers and header continuation lines, optionally preceded by a @@ -210,7 +213,7 @@ Return a message object structure from a string. This is exactly equivalent to ``Parser().parsestr(s)``. *_class* and *policy* are interpreted as - with the :class:`Parser` class constructor. + with the :class:`~email.parser.Parser` class constructor. .. versionchanged:: 3.3 Removed the *strict* argument. Added the *policy* keyword. @@ -220,7 +223,8 @@ Return a message object structure from a byte string. This is exactly equivalent to ``BytesParser().parsebytes(s)``. Optional *_class* and - *strict* are interpreted as with the :class:`Parser` class constructor. + *strict* are interpreted as with the :class:`~email.parser.Parser` class + constructor. .. versionadded:: 3.2 .. versionchanged:: 3.3 @@ -231,7 +235,8 @@ Return a message object structure tree from an open :term:`file object`. This is exactly equivalent to ``Parser().parse(fp)``. *_class* - and *policy* are interpreted as with the :class:`Parser` class constructor. + and *policy* are interpreted as with the :class:`~email.parser.Parser` class + constructor. .. versionchanged:: Removed the *strict* argument. Added the *policy* keyword. @@ -241,8 +246,8 @@ Return a message object structure tree from an open binary :term:`file object`. This is exactly equivalent to ``BytesParser().parse(fp)``. - *_class* and *policy* are interpreted as with the :class:`Parser` - class constructor. + *_class* and *policy* are interpreted as with the + :class:`~email.parser.Parser` class constructor. .. versionadded:: 3.2 .. versionchanged:: 3.3 @@ -261,32 +266,35 @@ * Most non-\ :mimetype:`multipart` type messages are parsed as a single message object with a string payload. These objects will return ``False`` for - :meth:`is_multipart`. Their :meth:`get_payload` method will return a string - object. + :meth:`~email.message.Message.is_multipart`. Their + :meth:`~email.message.Message.get_payload` method will return a string object. * All :mimetype:`multipart` type messages will be parsed as a container message object with a list of sub-message objects for their payload. The outer - container message will return ``True`` for :meth:`is_multipart` and their - :meth:`get_payload` method will return the list of :class:`~email.message.Message` - subparts. + container message will return ``True`` for + :meth:`~email.message.Message.is_multipart` and their + :meth:`~email.message.Message.get_payload` method will return the list of + :class:`~email.message.Message` subparts. * Most messages with a content type of :mimetype:`message/\*` (e.g. :mimetype:`message/delivery-status` and :mimetype:`message/rfc822`) will also be parsed as container object containing a list payload of length 1. Their - :meth:`is_multipart` method will return ``True``. The single element in the - list payload will be a sub-message object. + :meth:`~email.message.Message.is_multipart` method will return ``True``. 
+ The single element in the list payload will be a sub-message object. * Some non-standards compliant messages may not be internally consistent about their :mimetype:`multipart`\ -edness. Such messages may have a :mailheader:`Content-Type` header of type :mimetype:`multipart`, but their - :meth:`is_multipart` method may return ``False``. If such messages were parsed - with the :class:`FeedParser`, they will have an instance of the - :class:`MultipartInvariantViolationDefect` class in their *defects* attribute - list. See :mod:`email.errors` for details. + :meth:`~email.message.Message.is_multipart` method may return ``False``. + If such messages were parsed with the :class:`~email.parser.FeedParser`, + they will have an instance of the + :class:`~email.errors.MultipartInvariantViolationDefect` class in their + *defects* attribute list. See :mod:`email.errors` for details. .. rubric:: Footnotes .. [#] As of email package version 3.0, introduced in Python 2.4, the classic - :class:`Parser` was re-implemented in terms of the :class:`FeedParser`, so the - semantics and results are identical between the two parsers. + :class:`~email.parser.Parser` was re-implemented in terms of the + :class:`~email.parser.FeedParser`, so the semantics and results are + identical between the two parsers. diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -304,7 +304,7 @@ This concrete :class:`Policy` is the backward compatibility policy. It replicates the behavior of the email package in Python 3.2. The - :mod:`policy` module also defines an instance of this class, + :mod:`~email.policy` module also defines an instance of this class, :const:`compat32`, that is used as the default policy. Thus the default behavior of the email package is to maintain compatibility with Python 3.2. @@ -448,10 +448,11 @@ .. method:: fold_binary(name, value) - The same as :meth:`fold` if :attr:`cte_type` is ``7bit``, except that - the returned value is bytes. + The same as :meth:`fold` if :attr:`~Policy.cte_type` is ``7bit``, except + that the returned value is bytes. - If :attr:`cte_type` is ``8bit``, non-ASCII binary data is converted back + If :attr:`~Policy.cte_type` is ``8bit``, non-ASCII binary data is + converted back into bytes. Headers with binary data are not refolded, regardless of the ``refold_header`` setting, since there is no way to know whether the binary data consists of single byte characters or multibyte characters. diff --git a/Doc/library/email.rst b/Doc/library/email.rst --- a/Doc/library/email.rst +++ b/Doc/library/email.rst @@ -147,14 +147,15 @@ *Note that the version 3 names will continue to work until Python 2.6*. * The :mod:`email.mime.application` module was added, which contains the - :class:`MIMEApplication` class. + :class:`~email.mime.application.MIMEApplication` class. * Methods that were deprecated in version 3 have been removed. These include :meth:`Generator.__call__`, :meth:`Message.get_type`, :meth:`Message.get_main_type`, :meth:`Message.get_subtype`. * Fixes have been added for :rfc:`2231` support which can change some of the - return types for :func:`Message.get_param` and friends. Under some + return types for :func:`Message.get_param ` + and friends. 
Under some circumstances, values which used to return a 3-tuple now return simple strings (specifically, if all extended parameter segments were unencoded, there is no language and charset designation expected, so the return type is now a simple @@ -163,23 +164,24 @@ Here are the major differences between :mod:`email` version 3 and version 2: -* The :class:`FeedParser` class was introduced, and the :class:`Parser` class - was implemented in terms of the :class:`FeedParser`. All parsing therefore is +* The :class:`~email.parser.FeedParser` class was introduced, and the + :class:`~email.parser.Parser` class was implemented in terms of the + :class:`~email.parser.FeedParser`. All parsing therefore is non-strict, and parsing will make a best effort never to raise an exception. Problems found while parsing messages are stored in the message's *defect* attribute. * All aspects of the API which raised :exc:`DeprecationWarning`\ s in version 2 have been removed. These include the *_encoder* argument to the - :class:`MIMEText` constructor, the :meth:`Message.add_payload` method, the - :func:`Utils.dump_address_pair` function, and the functions :func:`Utils.decode` - and :func:`Utils.encode`. + :class:`~email.mime.text.MIMEText` constructor, the + :meth:`Message.add_payload` method, the :func:`Utils.dump_address_pair` + function, and the functions :func:`Utils.decode` and :func:`Utils.encode`. * New :exc:`DeprecationWarning`\ s have been added to: :meth:`Generator.__call__`, :meth:`Message.get_type`, :meth:`Message.get_main_type`, :meth:`Message.get_subtype`, and the *strict* - argument to the :class:`Parser` class. These are expected to be removed in - future versions. + argument to the :class:`~email.parser.Parser` class. These are expected to + be removed in future versions. * Support for Pythons earlier than 2.3 has been removed. @@ -187,53 +189,61 @@ * The :mod:`email.Header` and :mod:`email.Charset` modules have been added. -* The pickle format for :class:`Message` instances has changed. Since this was - never (and still isn't) formally defined, this isn't considered a backward - incompatibility. However if your application pickles and unpickles - :class:`Message` instances, be aware that in :mod:`email` version 2, - :class:`Message` instances now have private variables *_charset* and - *_default_type*. +* The pickle format for :class:`~email.message.Message` instances has changed. + Since this was never (and still isn't) formally defined, this isn't + considered a backward incompatibility. However if your application pickles + and unpickles :class:`~email.message.Message` instances, be aware that in + :mod:`email` version 2, :class:`~email.message.Message` instances now have + private variables *_charset* and *_default_type*. -* Several methods in the :class:`Message` class have been deprecated, or their - signatures changed. Also, many new methods have been added. See the - documentation for the :class:`Message` class for details. The changes should be - completely backward compatible. +* Several methods in the :class:`~email.message.Message` class have been + deprecated, or their signatures changed. Also, many new methods have been + added. See the documentation for the :class:`~email.message.Message` class + for details. The changes should be completely backward compatible. * The object structure has changed in the face of :mimetype:`message/rfc822` - content types. In :mod:`email` version 1, such a type would be represented by a - scalar payload, i.e. 
the container message's :meth:`is_multipart` returned - false, :meth:`get_payload` was not a list object, but a single :class:`Message` - instance. + content types. In :mod:`email` version 1, such a type would be represented + by a scalar payload, i.e. the container message's + :meth:`~email.message.Message.is_multipart` returned false, + :meth:`~email.message.Message.get_payload` was not a list object, but a + single :class:`~email.message.Message` instance. This structure was inconsistent with the rest of the package, so the object representation for :mimetype:`message/rfc822` content types was changed. In :mod:`email` version 2, the container *does* return ``True`` from - :meth:`is_multipart`, and :meth:`get_payload` returns a list containing a single - :class:`Message` item. + :meth:`~email.message.Message.is_multipart`, and + :meth:`~email.message.Message.get_payload` returns a list containing a single + :class:`~email.message.Message` item. - Note that this is one place that backward compatibility could not be completely - maintained. However, if you're already testing the return type of - :meth:`get_payload`, you should be fine. You just need to make sure your code - doesn't do a :meth:`set_payload` with a :class:`Message` instance on a container - with a content type of :mimetype:`message/rfc822`. + Note that this is one place that backward compatibility could not be + completely maintained. However, if you're already testing the return type of + :meth:`~email.message.Message.get_payload`, you should be fine. You just need + to make sure your code doesn't do a :meth:`~email.message.Message.set_payload` + with a :class:`~email.message.Message` instance on a container with a content + type of :mimetype:`message/rfc822`. -* The :class:`Parser` constructor's *strict* argument was added, and its - :meth:`parse` and :meth:`parsestr` methods grew a *headersonly* argument. The - *strict* flag was also added to functions :func:`email.message_from_file` and - :func:`email.message_from_string`. +* The :class:`~email.parser.Parser` constructor's *strict* argument was added, + and its :meth:`~email.parser.Parser.parse` and + :meth:`~email.parser.Parser.parsestr` methods grew a *headersonly* argument. + The *strict* flag was also added to functions :func:`email.message_from_file` + and :func:`email.message_from_string`. -* :meth:`Generator.__call__` is deprecated; use :meth:`Generator.flatten` - instead. The :class:`Generator` class has also grown the :meth:`clone` method. +* :meth:`Generator.__call__` is deprecated; use :meth:`Generator.flatten + ` instead. The + :class:`~email.generator.Generator` class has also grown the + :meth:`~email.generator.Generator.clone` method. -* The :class:`DecodedGenerator` class in the :mod:`email.Generator` module was - added. +* The :class:`~email.generator.DecodedGenerator` class in the + :mod:`email.generator` module was added. -* The intermediate base classes :class:`MIMENonMultipart` and - :class:`MIMEMultipart` have been added, and interposed in the class hierarchy - for most of the other MIME-related derived classes. +* The intermediate base classes + :class:`~email.mime.nonmultipart.MIMENonMultipart` and + :class:`~email.mime.multipart.MIMEMultipart` have been added, and interposed + in the class hierarchy for most of the other MIME-related derived classes. -* The *_encoder* argument to the :class:`MIMEText` constructor has been - deprecated. Encoding now happens implicitly based on the *_charset* argument. 
+* The *_encoder* argument to the :class:`~email.mime.text.MIMEText` constructor + has been deprecated. Encoding now happens implicitly based on the + *_charset* argument. * The following functions in the :mod:`email.Utils` module have been deprecated: :func:`dump_address_pairs`, :func:`decode`, and :func:`encode`. The following @@ -266,17 +276,22 @@ * :func:`messageFromFile` has been renamed to :func:`message_from_file`. -The :class:`Message` class has the following differences: +The :class:`~email.message.Message` class has the following differences: -* The method :meth:`asString` was renamed to :meth:`as_string`. +* The method :meth:`asString` was renamed to + :meth:`~email.message.Message.as_string`. -* The method :meth:`ismultipart` was renamed to :meth:`is_multipart`. +* The method :meth:`ismultipart` was renamed to + :meth:`~email.message.Message.is_multipart`. -* The :meth:`get_payload` method has grown a *decode* optional argument. +* The :meth:`~email.message.Message.get_payload` method has grown a *decode* + optional argument. -* The method :meth:`getall` was renamed to :meth:`get_all`. +* The method :meth:`getall` was renamed to + :meth:`~email.message.Message.get_all`. -* The method :meth:`addheader` was renamed to :meth:`add_header`. +* The method :meth:`addheader` was renamed to + :meth:`~email.message.Message.add_header`. * The method :meth:`gettype` was renamed to :meth:`get_type`. @@ -284,48 +299,57 @@ * The method :meth:`getsubtype` was renamed to :meth:`get_subtype`. -* The method :meth:`getparams` was renamed to :meth:`get_params`. Also, whereas - :meth:`getparams` returned a list of strings, :meth:`get_params` returns a list - of 2-tuples, effectively the key/value pairs of the parameters, split on the - ``'='`` sign. +* The method :meth:`getparams` was renamed to + :meth:`~email.message.Message.get_params`. Also, whereas :meth:`getparams` + returned a list of strings, :meth:`~email.message.Message.get_params` returns + a list of 2-tuples, effectively the key/value pairs of the parameters, split + on the ``'='`` sign. -* The method :meth:`getparam` was renamed to :meth:`get_param`. +* The method :meth:`getparam` was renamed to + :meth:`~email.message.Message.get_param`. -* The method :meth:`getcharsets` was renamed to :meth:`get_charsets`. +* The method :meth:`getcharsets` was renamed to + :meth:`~email.message.Message.get_charsets`. -* The method :meth:`getfilename` was renamed to :meth:`get_filename`. +* The method :meth:`getfilename` was renamed to + :meth:`~email.message.Message.get_filename`. -* The method :meth:`getboundary` was renamed to :meth:`get_boundary`. +* The method :meth:`getboundary` was renamed to + :meth:`~email.message.Message.get_boundary`. -* The method :meth:`setboundary` was renamed to :meth:`set_boundary`. +* The method :meth:`setboundary` was renamed to + :meth:`~email.message.Message.set_boundary`. * The method :meth:`getdecodedpayload` was removed. To get similar - functionality, pass the value 1 to the *decode* flag of the get_payload() - method. + functionality, pass the value 1 to the *decode* flag of the + :meth:`~email.message.Message.get_payload` method. * The method :meth:`getpayloadastext` was removed. Similar functionality is - supported by the :class:`DecodedGenerator` class in the :mod:`email.generator` + supported by the :class:`~email.generator.DecodedGenerator` class in the + :mod:`email.generator` module. + +* The method :meth:`getbodyastext` was removed. 
You can get similar + functionality by creating an iterator with + :func:`~email.iterators.typed_subpart_iterator` in the :mod:`email.iterators` module. -* The method :meth:`getbodyastext` was removed. You can get similar - functionality by creating an iterator with :func:`typed_subpart_iterator` in the - :mod:`email.iterators` module. +The :class:`~email.parser.Parser` class has no differences in its public +interface. It does have some additional smarts to recognize +:mimetype:`message/delivery-status` type messages, which it represents as a +:class:`~email.message.Message` instance containing separate +:class:`~email.message.Message` subparts for each header block in the delivery +status notification [#]_. -The :class:`Parser` class has no differences in its public interface. It does -have some additional smarts to recognize :mimetype:`message/delivery-status` -type messages, which it represents as a :class:`Message` instance containing -separate :class:`Message` subparts for each header block in the delivery status -notification [#]_. - -The :class:`Generator` class has no differences in its public interface. There -is a new class in the :mod:`email.generator` module though, called -:class:`DecodedGenerator` which provides most of the functionality previously -available in the :meth:`Message.getpayloadastext` method. +The :class:`~email.generator.Generator` class has no differences in its public +interface. There is a new class in the :mod:`email.generator` module though, +called :class:`~email.generator.DecodedGenerator` which provides most of the +functionality previously available in the :meth:`Message.getpayloadastext` +method. The following modules and classes have been changed: -* The :class:`MIMEBase` class constructor arguments *_major* and *_minor* have - changed to *_maintype* and *_subtype* respectively. +* The :class:`~email.mime.base.MIMEBase` class constructor arguments *_major* + and *_minor* have changed to *_maintype* and *_subtype* respectively. * The ``Image`` class/module has been renamed to ``MIMEImage``. The *_minor* argument has been renamed to *_subtype*. @@ -338,7 +362,8 @@ but that clashed with the Python standard library module :mod:`rfc822` on some case-insensitive file systems. - Also, the :class:`MIMEMessage` class now represents any kind of MIME message + Also, the :class:`~email.mime.message.MIMEMessage` class now represents any + kind of MIME message with main type :mimetype:`message`. It takes an optional argument *_subtype* which is used to set the MIME subtype. *_subtype* defaults to :mimetype:`rfc822`. @@ -348,8 +373,8 @@ :mod:`email.utils` module. The ``MsgReader`` class/module has been removed. Its functionality is most -closely supported in the :func:`body_line_iterator` function in the -:mod:`email.iterators` module. +closely supported in the :func:`~email.iterators.body_line_iterator` function +in the :mod:`email.iterators` module. .. rubric:: Footnotes diff --git a/Doc/library/email.util.rst b/Doc/library/email.util.rst --- a/Doc/library/email.util.rst +++ b/Doc/library/email.util.rst @@ -49,8 +49,8 @@ This method returns a list of 2-tuples of the form returned by ``parseaddr()``. *fieldvalues* is a sequence of header field values as might be returned by - :meth:`Message.get_all`. Here's a simple example that gets all the recipients - of a message:: + :meth:`Message.get_all `. Here's a simple + example that gets all the recipients of a message:: from email.utils import getaddresses @@ -187,10 +187,11 @@ .. 
function:: collapse_rfc2231_value(value, errors='replace', fallback_charset='us-ascii') When a header parameter is encoded in :rfc:`2231` format, - :meth:`Message.get_param` may return a 3-tuple containing the character set, + :meth:`Message.get_param ` may return a + 3-tuple containing the character set, language, and value. :func:`collapse_rfc2231_value` turns this into a unicode string. Optional *errors* is passed to the *errors* argument of :class:`str`'s - :func:`encode` method; it defaults to ``'replace'``. Optional + :func:`~str.encode` method; it defaults to ``'replace'``. Optional *fallback_charset* specifies the character set to use if the one in the :rfc:`2231` header is not known by Python; it defaults to ``'us-ascii'``. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 09:05:11 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 09:05:11 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318761=3A_Improved_cross-references_in_email_doc?= =?utf-8?q?umentation=2E?= Message-ID: <3cJR3W5ZdFz7Ljg@mail.python.org> http://hg.python.org/cpython/rev/464eed5ddb2e changeset: 85260:464eed5ddb2e parent: 85258:97e38b957041 parent: 85259:7a1534dba050 user: Serhiy Storchaka date: Mon Aug 19 10:03:11 2013 +0300 summary: Issue #18761: Improved cross-references in email documentation. files: Doc/library/email.charset.rst | 2 +- Doc/library/email.errors.rst | 29 +- Doc/library/email.headerregistry.rst | 15 +- Doc/library/email.iterators.rst | 11 +- Doc/library/email.message.rst | 14 +- Doc/library/email.mime.rst | 29 +- Doc/library/email.parser.rst | 56 ++- Doc/library/email.policy.rst | 9 +- Doc/library/email.rst | 183 ++++++++------ Doc/library/email.util.rst | 9 +- 10 files changed, 205 insertions(+), 152 deletions(-) diff --git a/Doc/library/email.charset.rst b/Doc/library/email.charset.rst --- a/Doc/library/email.charset.rst +++ b/Doc/library/email.charset.rst @@ -234,5 +234,5 @@ *charset* is the canonical name of a character set. *codecname* is the name of a Python codec, as appropriate for the second argument to the :class:`str`'s - :func:`decode` method + :meth:`~str.encode` method diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst --- a/Doc/library/email.errors.rst +++ b/Doc/library/email.errors.rst @@ -25,7 +25,8 @@ Raised under some error conditions when parsing the :rfc:`2822` headers of a message, this class is derived from :exc:`MessageParseError`. It can be raised - from the :meth:`Parser.parse` or :meth:`Parser.parsestr` methods. + from the :meth:`Parser.parse ` or + :meth:`Parser.parsestr ` methods. Situations where it can be raised include finding an envelope header after the first :rfc:`2822` header of the message, finding a continuation line before the @@ -37,7 +38,8 @@ Raised under some error conditions when parsing the :rfc:`2822` headers of a message, this class is derived from :exc:`MessageParseError`. It can be raised - from the :meth:`Parser.parse` or :meth:`Parser.parsestr` methods. + from the :meth:`Parser.parse ` or + :meth:`Parser.parsestr ` methods. Situations where it can be raised include not being able to find the starting or terminating boundary in a :mimetype:`multipart/\*` message when strict parsing @@ -46,19 +48,20 @@ .. 
exception:: MultipartConversionError() - Raised when a payload is added to a :class:`Message` object using - :meth:`add_payload`, but the payload is already a scalar and the message's - :mailheader:`Content-Type` main type is not either :mimetype:`multipart` or - missing. :exc:`MultipartConversionError` multiply inherits from - :exc:`MessageError` and the built-in :exc:`TypeError`. + Raised when a payload is added to a :class:`~email.message.Message` object + using :meth:`add_payload`, but the payload is already a scalar and the + message's :mailheader:`Content-Type` main type is not either + :mimetype:`multipart` or missing. :exc:`MultipartConversionError` multiply + inherits from :exc:`MessageError` and the built-in :exc:`TypeError`. - Since :meth:`Message.add_payload` is deprecated, this exception is rarely raised - in practice. However the exception may also be raised if the :meth:`attach` + Since :meth:`Message.add_payload` is deprecated, this exception is rarely + raised in practice. However the exception may also be raised if the + :meth:`~email.message.Message.attach` method is called on an instance of a class derived from :class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g. :class:`~email.mime.image.MIMEImage`). -Here's the list of the defects that the :class:`~email.mime.parser.FeedParser` +Here's the list of the defects that the :class:`~email.parser.FeedParser` can find while parsing messages. Note that the defects are added to the message where the problem was found, so for example, if a message nested inside a :mimetype:`multipart/alternative` had a malformed header, that nested message @@ -97,9 +100,9 @@ This defect has not been used for several Python versions. * :class:`MultipartInvariantViolationDefect` -- A message claimed to be a - :mimetype:`multipart`, but no subparts were found. Note that when a message has - this defect, its :meth:`is_multipart` method may return false even though its - content type claims to be :mimetype:`multipart`. + :mimetype:`multipart`, but no subparts were found. Note that when a message + has this defect, its :meth:`~email.message.Message.is_multipart` method may + return false even though its content type claims to be :mimetype:`multipart`. * :class:`InvalidBase64PaddingDefect` -- When decoding a block of base64 enocded bytes, the padding was not correct. Enough padding is added to diff --git a/Doc/library/email.headerregistry.rst b/Doc/library/email.headerregistry.rst --- a/Doc/library/email.headerregistry.rst +++ b/Doc/library/email.headerregistry.rst @@ -56,15 +56,16 @@ .. attribute:: name The name of the header (the portion of the field before the ':'). This - is exactly the value passed in the :attr:`~EmailPolicy.header_factory` - call for *name*; that is, case is preserved. + is exactly the value passed in the + :attr:`~email.policy.EmailPolicy.header_factory` call for *name*; that + is, case is preserved. .. attribute:: defects A tuple of :exc:`~email.errors.HeaderDefect` instances reporting any RFC compliance problems found during parsing. The email package tries to - be complete about detecting compliance issues. See the :mod:`errors` + be complete about detecting compliance issues. See the :mod:`~email.errors` module for a discussion of the types of defects that may be reported. @@ -230,8 +231,8 @@ The single address encoded by the header value. 
If the header value actually contains more than one address (which would be a violation of - the RFC under the default :mod:`policy`), accessing this attribute will - result in a :exc:`ValueError`. + the RFC under the default :mod:`~email.policy`), accessing this attribute + will result in a :exc:`ValueError`. Many of the above classes also have a ``Unique`` variant (for example, @@ -275,7 +276,7 @@ .. class:: ContentTypeHeader - A :class:`ParameterizedMIMEHheader` class that handles the + A :class:`ParameterizedMIMEHeader` class that handles the :mailheader:`Content-Type` header. .. attribute:: content_type @@ -289,7 +290,7 @@ .. class:: ContentDispositionHeader - A :class:`ParameterizedMIMEHheader` class that handles the + A :class:`ParameterizedMIMEHeader` class that handles the :mailheader:`Content-Disposition` header. .. attribute:: content-disposition diff --git a/Doc/library/email.iterators.rst b/Doc/library/email.iterators.rst --- a/Doc/library/email.iterators.rst +++ b/Doc/library/email.iterators.rst @@ -6,8 +6,9 @@ Iterating over a message object tree is fairly easy with the -:meth:`Message.walk` method. The :mod:`email.iterators` module provides some -useful higher level iterations over message object trees. +:meth:`Message.walk ` method. The +:mod:`email.iterators` module provides some useful higher level iterations over +message object trees. .. function:: body_line_iterator(msg, decode=False) @@ -16,9 +17,11 @@ string payloads line-by-line. It skips over all the subpart headers, and it skips over any subpart with a payload that isn't a Python string. This is somewhat equivalent to reading the flat text representation of the message from - a file using :meth:`readline`, skipping over all the intervening headers. + a file using :meth:`~io.TextIOBase.readline`, skipping over all the + intervening headers. - Optional *decode* is passed through to :meth:`Message.get_payload`. + Optional *decode* is passed through to :meth:`Message.get_payload + `. .. function:: typed_subpart_iterator(msg, maintype='text', subtype=None) diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -62,8 +62,8 @@ format the message the way you want. For example, by default it does not do the mangling of lines that begin with ``From`` that is required by the unix mbox format. For more flexibility, instantiate a - :class:`~email.generator.Generator` instance and use its :meth:`flatten` - method directly. For example:: + :class:`~email.generator.Generator` instance and use its + :meth:`~email.generator.Generator.flatten` method directly. For example:: from io import StringIO from email.generator import Generator @@ -105,7 +105,8 @@ not do the mangling of lines that begin with ``From`` that is required by the unix mbox format. For more flexibility, instantiate a :class:`~email.generator.BytesGenerator` instance and use its - :meth:`flatten` method directly. For example:: + :meth:`~email.generator.BytesGenerator.flatten` method directly. + For example:: from io import BytesIO from email.generator import BytesGenerator @@ -530,8 +531,8 @@ Set the ``boundary`` parameter of the :mailheader:`Content-Type` header to *boundary*. :meth:`set_boundary` will always quote *boundary* if - necessary. A :exc:`HeaderParseError` is raised if the message object has - no :mailheader:`Content-Type` header. + necessary. A :exc:`~email.errors.HeaderParseError` is raised if the + message object has no :mailheader:`Content-Type` header. 
Note that using this method is subtly different than deleting the old :mailheader:`Content-Type` header and adding a new one with the new @@ -627,7 +628,8 @@ the end of the message. You do not need to set the epilogue to the empty string in order for the - :class:`Generator` to print a newline at the end of the file. + :class:`~email.generator.Generator` to print a newline at the end of the + file. .. attribute:: defects diff --git a/Doc/library/email.mime.rst b/Doc/library/email.mime.rst --- a/Doc/library/email.mime.rst +++ b/Doc/library/email.mime.rst @@ -35,7 +35,8 @@ *_maintype* is the :mailheader:`Content-Type` major type (e.g. :mimetype:`text` or :mimetype:`image`), and *_subtype* is the :mailheader:`Content-Type` minor type (e.g. :mimetype:`plain` or :mimetype:`gif`). *_params* is a parameter - key/value dictionary and is passed directly to :meth:`Message.add_header`. + key/value dictionary and is passed directly to :meth:`Message.add_header + `. The :class:`MIMEBase` class always adds a :mailheader:`Content-Type` header (based on *_maintype*, *_subtype*, and *_params*), and a @@ -50,8 +51,9 @@ A subclass of :class:`~email.mime.base.MIMEBase`, this is an intermediate base class for MIME messages that are not :mimetype:`multipart`. The primary - purpose of this class is to prevent the use of the :meth:`attach` method, - which only makes sense for :mimetype:`multipart` messages. If :meth:`attach` + purpose of this class is to prevent the use of the + :meth:`~email.message.Message.attach` method, which only makes sense for + :mimetype:`multipart` messages. If :meth:`~email.message.Message.attach` is called, a :exc:`~email.errors.MultipartConversionError` exception is raised. @@ -74,7 +76,8 @@ *_subparts* is a sequence of initial subparts for the payload. It must be possible to convert this sequence to a list. You can always attach new subparts - to the message by using the :meth:`Message.attach` method. + to the message by using the :meth:`Message.attach + ` method. Additional parameters for the :mailheader:`Content-Type` header are taken from the keyword arguments, or passed into the *_params* argument, which is a keyword @@ -95,8 +98,10 @@ Optional *_encoder* is a callable (i.e. function) which will perform the actual encoding of the data for transport. This callable takes one argument, which is - the :class:`MIMEApplication` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + the :class:`MIMEApplication` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. @@ -121,8 +126,10 @@ Optional *_encoder* is a callable (i.e. function) which will perform the actual encoding of the audio data for transport. This callable takes one argument, - which is the :class:`MIMEAudio` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + which is the :class:`MIMEAudio` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. 
It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. @@ -147,8 +154,10 @@ Optional *_encoder* is a callable (i.e. function) which will perform the actual encoding of the image data for transport. This callable takes one argument, - which is the :class:`MIMEImage` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + which is the :class:`MIMEImage` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. diff --git a/Doc/library/email.parser.rst b/Doc/library/email.parser.rst --- a/Doc/library/email.parser.rst +++ b/Doc/library/email.parser.rst @@ -7,7 +7,8 @@ Message object structures can be created in one of two ways: they can be created from whole cloth by instantiating :class:`~email.message.Message` objects and -stringing them together via :meth:`attach` and :meth:`set_payload` calls, or they +stringing them together via :meth:`~email.message.Message.attach` and +:meth:`~email.message.Message.set_payload` calls, or they can be created by parsing a flat text representation of the email message. The :mod:`email` package provides a standard parser that understands most email @@ -16,8 +17,9 @@ :class:`~email.message.Message` instance of the object structure. For simple, non-MIME messages the payload of this root object will likely be a string containing the text of the message. For MIME messages, the root object will -return ``True`` from its :meth:`is_multipart` method, and the subparts can be -accessed via the :meth:`get_payload` and :meth:`walk` methods. +return ``True`` from its :meth:`~email.message.Message.is_multipart` method, and +the subparts can be accessed via the :meth:`~email.message.Message.get_payload` +and :meth:`~email.message.Message.walk` methods. There are actually two parser interfaces available for use, the classic :class:`Parser` API and the incremental :class:`FeedParser` API. The classic @@ -134,7 +136,8 @@ Read all the data from the file-like object *fp*, parse the resulting text, and return the root message object. *fp* must support both the - :meth:`readline` and the :meth:`read` methods on file-like objects. + :meth:`~io.TextIOBase.readline` and the :meth:`~io.TextIOBase.read` + methods on file-like objects. The text contained in *fp* must be formatted as a block of :rfc:`2822` style headers and header continuation lines, optionally preceded by a @@ -173,8 +176,8 @@ Read all the data from the binary file-like object *fp*, parse the resulting bytes, and return the message object. *fp* must support - both the :meth:`readline` and the :meth:`read` methods on file-like - objects. + both the :meth:`~io.IOBase.readline` and the :meth:`~io.IOBase.read` + methods on file-like objects. The bytes contained in *fp* must be formatted as a block of :rfc:`2822` style headers and header continuation lines, optionally preceded by a @@ -210,7 +213,7 @@ Return a message object structure from a string. This is exactly equivalent to ``Parser().parsestr(s)``. 
*_class* and *policy* are interpreted as - with the :class:`Parser` class constructor. + with the :class:`~email.parser.Parser` class constructor. .. versionchanged:: 3.3 Removed the *strict* argument. Added the *policy* keyword. @@ -220,7 +223,8 @@ Return a message object structure from a byte string. This is exactly equivalent to ``BytesParser().parsebytes(s)``. Optional *_class* and - *strict* are interpreted as with the :class:`Parser` class constructor. + *strict* are interpreted as with the :class:`~email.parser.Parser` class + constructor. .. versionadded:: 3.2 .. versionchanged:: 3.3 @@ -231,7 +235,8 @@ Return a message object structure tree from an open :term:`file object`. This is exactly equivalent to ``Parser().parse(fp)``. *_class* - and *policy* are interpreted as with the :class:`Parser` class constructor. + and *policy* are interpreted as with the :class:`~email.parser.Parser` class + constructor. .. versionchanged:: Removed the *strict* argument. Added the *policy* keyword. @@ -241,8 +246,8 @@ Return a message object structure tree from an open binary :term:`file object`. This is exactly equivalent to ``BytesParser().parse(fp)``. - *_class* and *policy* are interpreted as with the :class:`Parser` - class constructor. + *_class* and *policy* are interpreted as with the + :class:`~email.parser.Parser` class constructor. .. versionadded:: 3.2 .. versionchanged:: 3.3 @@ -261,32 +266,35 @@ * Most non-\ :mimetype:`multipart` type messages are parsed as a single message object with a string payload. These objects will return ``False`` for - :meth:`is_multipart`. Their :meth:`get_payload` method will return a string - object. + :meth:`~email.message.Message.is_multipart`. Their + :meth:`~email.message.Message.get_payload` method will return a string object. * All :mimetype:`multipart` type messages will be parsed as a container message object with a list of sub-message objects for their payload. The outer - container message will return ``True`` for :meth:`is_multipart` and their - :meth:`get_payload` method will return the list of :class:`~email.message.Message` - subparts. + container message will return ``True`` for + :meth:`~email.message.Message.is_multipart` and their + :meth:`~email.message.Message.get_payload` method will return the list of + :class:`~email.message.Message` subparts. * Most messages with a content type of :mimetype:`message/\*` (e.g. :mimetype:`message/delivery-status` and :mimetype:`message/rfc822`) will also be parsed as container object containing a list payload of length 1. Their - :meth:`is_multipart` method will return ``True``. The single element in the - list payload will be a sub-message object. + :meth:`~email.message.Message.is_multipart` method will return ``True``. + The single element in the list payload will be a sub-message object. * Some non-standards compliant messages may not be internally consistent about their :mimetype:`multipart`\ -edness. Such messages may have a :mailheader:`Content-Type` header of type :mimetype:`multipart`, but their - :meth:`is_multipart` method may return ``False``. If such messages were parsed - with the :class:`FeedParser`, they will have an instance of the - :class:`MultipartInvariantViolationDefect` class in their *defects* attribute - list. See :mod:`email.errors` for details. + :meth:`~email.message.Message.is_multipart` method may return ``False``. 
+ If such messages were parsed with the :class:`~email.parser.FeedParser`, + they will have an instance of the + :class:`~email.errors.MultipartInvariantViolationDefect` class in their + *defects* attribute list. See :mod:`email.errors` for details. .. rubric:: Footnotes .. [#] As of email package version 3.0, introduced in Python 2.4, the classic - :class:`Parser` was re-implemented in terms of the :class:`FeedParser`, so the - semantics and results are identical between the two parsers. + :class:`~email.parser.Parser` was re-implemented in terms of the + :class:`~email.parser.FeedParser`, so the semantics and results are + identical between the two parsers. diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst --- a/Doc/library/email.policy.rst +++ b/Doc/library/email.policy.rst @@ -315,7 +315,7 @@ This concrete :class:`Policy` is the backward compatibility policy. It replicates the behavior of the email package in Python 3.2. The - :mod:`policy` module also defines an instance of this class, + :mod:`~email.policy` module also defines an instance of this class, :const:`compat32`, that is used as the default policy. Thus the default behavior of the email package is to maintain compatibility with Python 3.2. @@ -459,10 +459,11 @@ .. method:: fold_binary(name, value) - The same as :meth:`fold` if :attr:`cte_type` is ``7bit``, except that - the returned value is bytes. + The same as :meth:`fold` if :attr:`~Policy.cte_type` is ``7bit``, except + that the returned value is bytes. - If :attr:`cte_type` is ``8bit``, non-ASCII binary data is converted back + If :attr:`~Policy.cte_type` is ``8bit``, non-ASCII binary data is + converted back into bytes. Headers with binary data are not refolded, regardless of the ``refold_header`` setting, since there is no way to know whether the binary data consists of single byte characters or multibyte characters. diff --git a/Doc/library/email.rst b/Doc/library/email.rst --- a/Doc/library/email.rst +++ b/Doc/library/email.rst @@ -147,14 +147,15 @@ *Note that the version 3 names will continue to work until Python 2.6*. * The :mod:`email.mime.application` module was added, which contains the - :class:`MIMEApplication` class. + :class:`~email.mime.application.MIMEApplication` class. * Methods that were deprecated in version 3 have been removed. These include :meth:`Generator.__call__`, :meth:`Message.get_type`, :meth:`Message.get_main_type`, :meth:`Message.get_subtype`. * Fixes have been added for :rfc:`2231` support which can change some of the - return types for :func:`Message.get_param` and friends. Under some + return types for :func:`Message.get_param ` + and friends. Under some circumstances, values which used to return a 3-tuple now return simple strings (specifically, if all extended parameter segments were unencoded, there is no language and charset designation expected, so the return type is now a simple @@ -163,23 +164,24 @@ Here are the major differences between :mod:`email` version 3 and version 2: -* The :class:`FeedParser` class was introduced, and the :class:`Parser` class - was implemented in terms of the :class:`FeedParser`. All parsing therefore is +* The :class:`~email.parser.FeedParser` class was introduced, and the + :class:`~email.parser.Parser` class was implemented in terms of the + :class:`~email.parser.FeedParser`. All parsing therefore is non-strict, and parsing will make a best effort never to raise an exception. Problems found while parsing messages are stored in the message's *defect* attribute. 
* All aspects of the API which raised :exc:`DeprecationWarning`\ s in version 2 have been removed. These include the *_encoder* argument to the - :class:`MIMEText` constructor, the :meth:`Message.add_payload` method, the - :func:`Utils.dump_address_pair` function, and the functions :func:`Utils.decode` - and :func:`Utils.encode`. + :class:`~email.mime.text.MIMEText` constructor, the + :meth:`Message.add_payload` method, the :func:`Utils.dump_address_pair` + function, and the functions :func:`Utils.decode` and :func:`Utils.encode`. * New :exc:`DeprecationWarning`\ s have been added to: :meth:`Generator.__call__`, :meth:`Message.get_type`, :meth:`Message.get_main_type`, :meth:`Message.get_subtype`, and the *strict* - argument to the :class:`Parser` class. These are expected to be removed in - future versions. + argument to the :class:`~email.parser.Parser` class. These are expected to + be removed in future versions. * Support for Pythons earlier than 2.3 has been removed. @@ -187,53 +189,61 @@ * The :mod:`email.Header` and :mod:`email.Charset` modules have been added. -* The pickle format for :class:`Message` instances has changed. Since this was - never (and still isn't) formally defined, this isn't considered a backward - incompatibility. However if your application pickles and unpickles - :class:`Message` instances, be aware that in :mod:`email` version 2, - :class:`Message` instances now have private variables *_charset* and - *_default_type*. +* The pickle format for :class:`~email.message.Message` instances has changed. + Since this was never (and still isn't) formally defined, this isn't + considered a backward incompatibility. However if your application pickles + and unpickles :class:`~email.message.Message` instances, be aware that in + :mod:`email` version 2, :class:`~email.message.Message` instances now have + private variables *_charset* and *_default_type*. -* Several methods in the :class:`Message` class have been deprecated, or their - signatures changed. Also, many new methods have been added. See the - documentation for the :class:`Message` class for details. The changes should be - completely backward compatible. +* Several methods in the :class:`~email.message.Message` class have been + deprecated, or their signatures changed. Also, many new methods have been + added. See the documentation for the :class:`~email.message.Message` class + for details. The changes should be completely backward compatible. * The object structure has changed in the face of :mimetype:`message/rfc822` - content types. In :mod:`email` version 1, such a type would be represented by a - scalar payload, i.e. the container message's :meth:`is_multipart` returned - false, :meth:`get_payload` was not a list object, but a single :class:`Message` - instance. + content types. In :mod:`email` version 1, such a type would be represented + by a scalar payload, i.e. the container message's + :meth:`~email.message.Message.is_multipart` returned false, + :meth:`~email.message.Message.get_payload` was not a list object, but a + single :class:`~email.message.Message` instance. This structure was inconsistent with the rest of the package, so the object representation for :mimetype:`message/rfc822` content types was changed. In :mod:`email` version 2, the container *does* return ``True`` from - :meth:`is_multipart`, and :meth:`get_payload` returns a list containing a single - :class:`Message` item. 
+ :meth:`~email.message.Message.is_multipart`, and + :meth:`~email.message.Message.get_payload` returns a list containing a single + :class:`~email.message.Message` item. - Note that this is one place that backward compatibility could not be completely - maintained. However, if you're already testing the return type of - :meth:`get_payload`, you should be fine. You just need to make sure your code - doesn't do a :meth:`set_payload` with a :class:`Message` instance on a container - with a content type of :mimetype:`message/rfc822`. + Note that this is one place that backward compatibility could not be + completely maintained. However, if you're already testing the return type of + :meth:`~email.message.Message.get_payload`, you should be fine. You just need + to make sure your code doesn't do a :meth:`~email.message.Message.set_payload` + with a :class:`~email.message.Message` instance on a container with a content + type of :mimetype:`message/rfc822`. -* The :class:`Parser` constructor's *strict* argument was added, and its - :meth:`parse` and :meth:`parsestr` methods grew a *headersonly* argument. The - *strict* flag was also added to functions :func:`email.message_from_file` and - :func:`email.message_from_string`. +* The :class:`~email.parser.Parser` constructor's *strict* argument was added, + and its :meth:`~email.parser.Parser.parse` and + :meth:`~email.parser.Parser.parsestr` methods grew a *headersonly* argument. + The *strict* flag was also added to functions :func:`email.message_from_file` + and :func:`email.message_from_string`. -* :meth:`Generator.__call__` is deprecated; use :meth:`Generator.flatten` - instead. The :class:`Generator` class has also grown the :meth:`clone` method. +* :meth:`Generator.__call__` is deprecated; use :meth:`Generator.flatten + ` instead. The + :class:`~email.generator.Generator` class has also grown the + :meth:`~email.generator.Generator.clone` method. -* The :class:`DecodedGenerator` class in the :mod:`email.Generator` module was - added. +* The :class:`~email.generator.DecodedGenerator` class in the + :mod:`email.generator` module was added. -* The intermediate base classes :class:`MIMENonMultipart` and - :class:`MIMEMultipart` have been added, and interposed in the class hierarchy - for most of the other MIME-related derived classes. +* The intermediate base classes + :class:`~email.mime.nonmultipart.MIMENonMultipart` and + :class:`~email.mime.multipart.MIMEMultipart` have been added, and interposed + in the class hierarchy for most of the other MIME-related derived classes. -* The *_encoder* argument to the :class:`MIMEText` constructor has been - deprecated. Encoding now happens implicitly based on the *_charset* argument. +* The *_encoder* argument to the :class:`~email.mime.text.MIMEText` constructor + has been deprecated. Encoding now happens implicitly based on the + *_charset* argument. * The following functions in the :mod:`email.Utils` module have been deprecated: :func:`dump_address_pairs`, :func:`decode`, and :func:`encode`. The following @@ -266,17 +276,22 @@ * :func:`messageFromFile` has been renamed to :func:`message_from_file`. -The :class:`Message` class has the following differences: +The :class:`~email.message.Message` class has the following differences: -* The method :meth:`asString` was renamed to :meth:`as_string`. +* The method :meth:`asString` was renamed to + :meth:`~email.message.Message.as_string`. -* The method :meth:`ismultipart` was renamed to :meth:`is_multipart`. 
+* The method :meth:`ismultipart` was renamed to + :meth:`~email.message.Message.is_multipart`. -* The :meth:`get_payload` method has grown a *decode* optional argument. +* The :meth:`~email.message.Message.get_payload` method has grown a *decode* + optional argument. -* The method :meth:`getall` was renamed to :meth:`get_all`. +* The method :meth:`getall` was renamed to + :meth:`~email.message.Message.get_all`. -* The method :meth:`addheader` was renamed to :meth:`add_header`. +* The method :meth:`addheader` was renamed to + :meth:`~email.message.Message.add_header`. * The method :meth:`gettype` was renamed to :meth:`get_type`. @@ -284,48 +299,57 @@ * The method :meth:`getsubtype` was renamed to :meth:`get_subtype`. -* The method :meth:`getparams` was renamed to :meth:`get_params`. Also, whereas - :meth:`getparams` returned a list of strings, :meth:`get_params` returns a list - of 2-tuples, effectively the key/value pairs of the parameters, split on the - ``'='`` sign. +* The method :meth:`getparams` was renamed to + :meth:`~email.message.Message.get_params`. Also, whereas :meth:`getparams` + returned a list of strings, :meth:`~email.message.Message.get_params` returns + a list of 2-tuples, effectively the key/value pairs of the parameters, split + on the ``'='`` sign. -* The method :meth:`getparam` was renamed to :meth:`get_param`. +* The method :meth:`getparam` was renamed to + :meth:`~email.message.Message.get_param`. -* The method :meth:`getcharsets` was renamed to :meth:`get_charsets`. +* The method :meth:`getcharsets` was renamed to + :meth:`~email.message.Message.get_charsets`. -* The method :meth:`getfilename` was renamed to :meth:`get_filename`. +* The method :meth:`getfilename` was renamed to + :meth:`~email.message.Message.get_filename`. -* The method :meth:`getboundary` was renamed to :meth:`get_boundary`. +* The method :meth:`getboundary` was renamed to + :meth:`~email.message.Message.get_boundary`. -* The method :meth:`setboundary` was renamed to :meth:`set_boundary`. +* The method :meth:`setboundary` was renamed to + :meth:`~email.message.Message.set_boundary`. * The method :meth:`getdecodedpayload` was removed. To get similar - functionality, pass the value 1 to the *decode* flag of the get_payload() - method. + functionality, pass the value 1 to the *decode* flag of the + :meth:`~email.message.Message.get_payload` method. * The method :meth:`getpayloadastext` was removed. Similar functionality is - supported by the :class:`DecodedGenerator` class in the :mod:`email.generator` + supported by the :class:`~email.generator.DecodedGenerator` class in the + :mod:`email.generator` module. + +* The method :meth:`getbodyastext` was removed. You can get similar + functionality by creating an iterator with + :func:`~email.iterators.typed_subpart_iterator` in the :mod:`email.iterators` module. -* The method :meth:`getbodyastext` was removed. You can get similar - functionality by creating an iterator with :func:`typed_subpart_iterator` in the - :mod:`email.iterators` module. +The :class:`~email.parser.Parser` class has no differences in its public +interface. It does have some additional smarts to recognize +:mimetype:`message/delivery-status` type messages, which it represents as a +:class:`~email.message.Message` instance containing separate +:class:`~email.message.Message` subparts for each header block in the delivery +status notification [#]_. -The :class:`Parser` class has no differences in its public interface. 
It does -have some additional smarts to recognize :mimetype:`message/delivery-status` -type messages, which it represents as a :class:`Message` instance containing -separate :class:`Message` subparts for each header block in the delivery status -notification [#]_. - -The :class:`Generator` class has no differences in its public interface. There -is a new class in the :mod:`email.generator` module though, called -:class:`DecodedGenerator` which provides most of the functionality previously -available in the :meth:`Message.getpayloadastext` method. +The :class:`~email.generator.Generator` class has no differences in its public +interface. There is a new class in the :mod:`email.generator` module though, +called :class:`~email.generator.DecodedGenerator` which provides most of the +functionality previously available in the :meth:`Message.getpayloadastext` +method. The following modules and classes have been changed: -* The :class:`MIMEBase` class constructor arguments *_major* and *_minor* have - changed to *_maintype* and *_subtype* respectively. +* The :class:`~email.mime.base.MIMEBase` class constructor arguments *_major* + and *_minor* have changed to *_maintype* and *_subtype* respectively. * The ``Image`` class/module has been renamed to ``MIMEImage``. The *_minor* argument has been renamed to *_subtype*. @@ -338,7 +362,8 @@ but that clashed with the Python standard library module :mod:`rfc822` on some case-insensitive file systems. - Also, the :class:`MIMEMessage` class now represents any kind of MIME message + Also, the :class:`~email.mime.message.MIMEMessage` class now represents any + kind of MIME message with main type :mimetype:`message`. It takes an optional argument *_subtype* which is used to set the MIME subtype. *_subtype* defaults to :mimetype:`rfc822`. @@ -348,8 +373,8 @@ :mod:`email.utils` module. The ``MsgReader`` class/module has been removed. Its functionality is most -closely supported in the :func:`body_line_iterator` function in the -:mod:`email.iterators` module. +closely supported in the :func:`~email.iterators.body_line_iterator` function +in the :mod:`email.iterators` module. .. rubric:: Footnotes diff --git a/Doc/library/email.util.rst b/Doc/library/email.util.rst --- a/Doc/library/email.util.rst +++ b/Doc/library/email.util.rst @@ -49,8 +49,8 @@ This method returns a list of 2-tuples of the form returned by ``parseaddr()``. *fieldvalues* is a sequence of header field values as might be returned by - :meth:`Message.get_all`. Here's a simple example that gets all the recipients - of a message:: + :meth:`Message.get_all `. Here's a simple + example that gets all the recipients of a message:: from email.utils import getaddresses @@ -187,10 +187,11 @@ .. function:: collapse_rfc2231_value(value, errors='replace', fallback_charset='us-ascii') When a header parameter is encoded in :rfc:`2231` format, - :meth:`Message.get_param` may return a 3-tuple containing the character set, + :meth:`Message.get_param ` may return a + 3-tuple containing the character set, language, and value. :func:`collapse_rfc2231_value` turns this into a unicode string. Optional *errors* is passed to the *errors* argument of :class:`str`'s - :func:`encode` method; it defaults to ``'replace'``. Optional + :func:`~str.encode` method; it defaults to ``'replace'``. Optional *fallback_charset* specifies the character set to use if the one in the :rfc:`2231` header is not known by Python; it defaults to ``'us-ascii'``. 
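The email.util.rst hunk above documents getaddresses() and
collapse_rfc2231_value().  As a rough, illustrative sketch only (the
addresses and the RFC 2231-encoded filename are invented for the example,
and the snippet is not part of the patch), the two helpers can be exercised
like this::

    from email import message_from_string
    from email.utils import collapse_rfc2231_value, getaddresses

    msg = message_from_string(
        "To: Alice <alice@example.com>, bob@example.com\n"
        "Cc: Carol <carol@example.com>\n"
        "Content-Type: text/plain; name*=iso-8859-1''r%E9sum%E9.txt\n"
        "\n"
        "body\n")

    # getaddresses() flattens several header field values into
    # (realname, address) pairs.
    tos = msg.get_all('to', [])
    ccs = msg.get_all('cc', [])
    print(getaddresses(tos + ccs))
    # [('Alice', 'alice@example.com'), ('', 'bob@example.com'),
    #  ('Carol', 'carol@example.com')]

    # For the RFC 2231-encoded parameter, get_param() returns a
    # (charset, language, value) 3-tuple; collapse_rfc2231_value()
    # collapses it back into a plain string.
    print(collapse_rfc2231_value(msg.get_param('name')))  # résumé.txt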
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 09:05:13 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 09:05:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzYx?= =?utf-8?q?=3A_Improved_cross-references_in_email_documentation=2E?= Message-ID: <3cJR3Y3rdfz7Ljy@mail.python.org> http://hg.python.org/cpython/rev/062533327ad2 changeset: 85261:062533327ad2 branch: 2.7 parent: 85256:310d187020e3 user: Serhiy Storchaka date: Mon Aug 19 10:03:25 2013 +0300 summary: Issue #18761: Improved cross-references in email documentation. files: Doc/library/email.charset.rst | 2 +- Doc/library/email.errors.rst | 29 +- Doc/library/email.header.rst | 2 +- Doc/library/email.iterators.rst | 11 +- Doc/library/email.message.rst | 11 +- Doc/library/email.mime.rst | 29 ++- Doc/library/email.parser.rst | 46 ++-- Doc/library/email.rst | 183 +++++++++++-------- Doc/library/email.util.rst | 14 +- 9 files changed, 188 insertions(+), 139 deletions(-) diff --git a/Doc/library/email.charset.rst b/Doc/library/email.charset.rst --- a/Doc/library/email.charset.rst +++ b/Doc/library/email.charset.rst @@ -249,5 +249,5 @@ *charset* is the canonical name of a character set. *codecname* is the name of a Python codec, as appropriate for the second argument to the :func:`unicode` - built-in, or to the :meth:`encode` method of a Unicode string. + built-in, or to the :meth:`~unicode.encode` method of a Unicode string. diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst --- a/Doc/library/email.errors.rst +++ b/Doc/library/email.errors.rst @@ -25,7 +25,8 @@ Raised under some error conditions when parsing the :rfc:`2822` headers of a message, this class is derived from :exc:`MessageParseError`. It can be raised - from the :meth:`Parser.parse` or :meth:`Parser.parsestr` methods. + from the :meth:`Parser.parse ` or + :meth:`Parser.parsestr ` methods. Situations where it can be raised include finding an envelope header after the first :rfc:`2822` header of the message, finding a continuation line before the @@ -37,7 +38,8 @@ Raised under some error conditions when parsing the :rfc:`2822` headers of a message, this class is derived from :exc:`MessageParseError`. It can be raised - from the :meth:`Parser.parse` or :meth:`Parser.parsestr` methods. + from the :meth:`Parser.parse ` or + :meth:`Parser.parsestr ` methods. Situations where it can be raised include not being able to find the starting or terminating boundary in a :mimetype:`multipart/\*` message when strict parsing @@ -46,19 +48,20 @@ .. exception:: MultipartConversionError() - Raised when a payload is added to a :class:`Message` object using - :meth:`add_payload`, but the payload is already a scalar and the message's - :mailheader:`Content-Type` main type is not either :mimetype:`multipart` or - missing. :exc:`MultipartConversionError` multiply inherits from - :exc:`MessageError` and the built-in :exc:`TypeError`. + Raised when a payload is added to a :class:`~email.message.Message` object + using :meth:`add_payload`, but the payload is already a scalar and the + message's :mailheader:`Content-Type` main type is not either + :mimetype:`multipart` or missing. :exc:`MultipartConversionError` multiply + inherits from :exc:`MessageError` and the built-in :exc:`TypeError`. - Since :meth:`Message.add_payload` is deprecated, this exception is rarely raised - in practice. 
However the exception may also be raised if the :meth:`attach` + Since :meth:`Message.add_payload` is deprecated, this exception is rarely + raised in practice. However the exception may also be raised if the + :meth:`~email.message.Message.attach` method is called on an instance of a class derived from :class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g. :class:`~email.mime.image.MIMEImage`). -Here's the list of the defects that the :class:`~email.mime.parser.FeedParser` +Here's the list of the defects that the :class:`~email.parser.FeedParser` can find while parsing messages. Note that the defects are added to the message where the problem was found, so for example, if a message nested inside a :mimetype:`multipart/alternative` had a malformed header, that nested message @@ -86,7 +89,7 @@ or was otherwise malformed. * :class:`MultipartInvariantViolationDefect` -- A message claimed to be a - :mimetype:`multipart`, but no subparts were found. Note that when a message has - this defect, its :meth:`is_multipart` method may return false even though its - content type claims to be :mimetype:`multipart`. + :mimetype:`multipart`, but no subparts were found. Note that when a message + has this defect, its :meth:`~email.message.Message.is_multipart` method may + return false even though its content type claims to be :mimetype:`multipart`. diff --git a/Doc/library/email.header.rst b/Doc/library/email.header.rst --- a/Doc/library/email.header.rst +++ b/Doc/library/email.header.rst @@ -103,7 +103,7 @@ not provoke a :exc:`UnicodeError` is used. Optional *errors* is passed through to any :func:`unicode` or - :func:`ustr.encode` call, and defaults to "strict". + :meth:`unicode.encode` call, and defaults to "strict". .. method:: encode([splitchars]) diff --git a/Doc/library/email.iterators.rst b/Doc/library/email.iterators.rst --- a/Doc/library/email.iterators.rst +++ b/Doc/library/email.iterators.rst @@ -6,8 +6,9 @@ Iterating over a message object tree is fairly easy with the -:meth:`Message.walk` method. The :mod:`email.iterators` module provides some -useful higher level iterations over message object trees. +:meth:`Message.walk ` method. The +:mod:`email.iterators` module provides some useful higher level iterations over +message object trees. .. function:: body_line_iterator(msg[, decode]) @@ -16,9 +17,11 @@ string payloads line-by-line. It skips over all the subpart headers, and it skips over any subpart with a payload that isn't a Python string. This is somewhat equivalent to reading the flat text representation of the message from - a file using :meth:`readline`, skipping over all the intervening headers. + a file using :meth:`~io.TextIOBase.readline`, skipping over all the + intervening headers. - Optional *decode* is passed through to :meth:`Message.get_payload`. + Optional *decode* is passed through to :meth:`Message.get_payload + `. .. function:: typed_subpart_iterator(msg[, maintype[, subtype]]) diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -48,8 +48,8 @@ Note that this method is provided as a convenience and may not always format the message the way you want. For example, by default it mangles lines that begin with ``From``. For more flexibility, instantiate a - :class:`~email.generator.Generator` instance and use its :meth:`flatten` - method directly. For example:: + :class:`~email.generator.Generator` instance and use its + :meth:`~email.generator.Generator.flatten` method directly. 
For example:: from cStringIO import StringIO from email.generator import Generator @@ -494,8 +494,8 @@ Set the ``boundary`` parameter of the :mailheader:`Content-Type` header to *boundary*. :meth:`set_boundary` will always quote *boundary* if - necessary. A :exc:`HeaderParseError` is raised if the message object has - no :mailheader:`Content-Type` header. + necessary. A :exc:`~email.errors.HeaderParseError` is raised if the + message object has no :mailheader:`Content-Type` header. Note that using this method is subtly different than deleting the old :mailheader:`Content-Type` header and adding a new one with the new @@ -589,7 +589,8 @@ .. versionchanged:: 2.5 You do not need to set the epilogue to the empty string in order for the - :class:`Generator` to print a newline at the end of the file. + :class:`~email.generator.Generator` to print a newline at the end of the + file. .. attribute:: defects diff --git a/Doc/library/email.mime.rst b/Doc/library/email.mime.rst --- a/Doc/library/email.mime.rst +++ b/Doc/library/email.mime.rst @@ -35,7 +35,8 @@ *_maintype* is the :mailheader:`Content-Type` major type (e.g. :mimetype:`text` or :mimetype:`image`), and *_subtype* is the :mailheader:`Content-Type` minor type (e.g. :mimetype:`plain` or :mimetype:`gif`). *_params* is a parameter - key/value dictionary and is passed directly to :meth:`Message.add_header`. + key/value dictionary and is passed directly to :meth:`Message.add_header + `. The :class:`MIMEBase` class always adds a :mailheader:`Content-Type` header (based on *_maintype*, *_subtype*, and *_params*), and a @@ -50,8 +51,9 @@ A subclass of :class:`~email.mime.base.MIMEBase`, this is an intermediate base class for MIME messages that are not :mimetype:`multipart`. The primary - purpose of this class is to prevent the use of the :meth:`attach` method, - which only makes sense for :mimetype:`multipart` messages. If :meth:`attach` + purpose of this class is to prevent the use of the + :meth:`~email.message.Message.attach` method, which only makes sense for + :mimetype:`multipart` messages. If :meth:`~email.message.Message.attach` is called, a :exc:`~email.errors.MultipartConversionError` exception is raised. .. versionadded:: 2.2.2 @@ -76,7 +78,8 @@ *_subparts* is a sequence of initial subparts for the payload. It must be possible to convert this sequence to a list. You can always attach new subparts - to the message by using the :meth:`Message.attach` method. + to the message by using the :meth:`Message.attach + ` method. Additional parameters for the :mailheader:`Content-Type` header are taken from the keyword arguments, or passed into the *_params* argument, which is a keyword @@ -99,8 +102,10 @@ Optional *_encoder* is a callable (i.e. function) which will perform the actual encoding of the data for transport. This callable takes one argument, which is - the :class:`MIMEApplication` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + the :class:`MIMEApplication` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. @@ -127,8 +132,10 @@ Optional *_encoder* is a callable (i.e. 
function) which will perform the actual encoding of the audio data for transport. This callable takes one argument, - which is the :class:`MIMEAudio` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + which is the :class:`MIMEAudio` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. @@ -153,8 +160,10 @@ Optional *_encoder* is a callable (i.e. function) which will perform the actual encoding of the image data for transport. This callable takes one argument, - which is the :class:`MIMEImage` instance. It should use :meth:`get_payload` and - :meth:`set_payload` to change the payload to encoded form. It should also add + which is the :class:`MIMEImage` instance. It should use + :meth:`~email.message.Message.get_payload` and + :meth:`~email.message.Message.set_payload` to change the payload to encoded + form. It should also add any :mailheader:`Content-Transfer-Encoding` or other headers to the message object as necessary. The default encoding is base64. See the :mod:`email.encoders` module for a list of the built-in encoders. diff --git a/Doc/library/email.parser.rst b/Doc/library/email.parser.rst --- a/Doc/library/email.parser.rst +++ b/Doc/library/email.parser.rst @@ -7,7 +7,8 @@ Message object structures can be created in one of two ways: they can be created from whole cloth by instantiating :class:`~email.message.Message` objects and -stringing them together via :meth:`attach` and :meth:`set_payload` calls, or they +stringing them together via :meth:`~email.message.Message.attach` and +:meth:`~email.message.Message.set_payload` calls, or they can be created by parsing a flat text representation of the email message. The :mod:`email` package provides a standard parser that understands most email @@ -16,8 +17,9 @@ :class:`~email.message.Message` instance of the object structure. For simple, non-MIME messages the payload of this root object will likely be a string containing the text of the message. For MIME messages, the root object will -return ``True`` from its :meth:`is_multipart` method, and the subparts can be -accessed via the :meth:`get_payload` and :meth:`walk` methods. +return ``True`` from its :meth:`~email.message.Message.is_multipart` method, and +the subparts can be accessed via the :meth:`~email.message.Message.get_payload` +and :meth:`~email.message.Message.walk` methods. There are actually two parser interfaces available for use, the classic :class:`Parser` API and the incremental :class:`FeedParser` API. The classic @@ -127,7 +129,8 @@ Read all the data from the file-like object *fp*, parse the resulting text, and return the root message object. *fp* must support both the - :meth:`readline` and the :meth:`read` methods on file-like objects. + :meth:`~io.TextIOBase.readline` and the :meth:`~io.TextIOBase.read` + methods on file-like objects. The text contained in *fp* must be formatted as a block of :rfc:`2822` style headers and header continuation lines, optionally preceded by a @@ -147,7 +150,7 @@ Similar to the :meth:`parse` method, except it takes a string object instead of a file-like object. 
Calling this method on a string is exactly - equivalent to wrapping *text* in a :class:`StringIO` instance first and + equivalent to wrapping *text* in a :class:`~StringIO.StringIO` instance first and calling :meth:`parse`. Optional *headersonly* is as with the :meth:`parse` method. @@ -165,7 +168,7 @@ Return a message object structure from a string. This is exactly equivalent to ``Parser().parsestr(s)``. Optional *_class* and *strict* are interpreted as - with the :class:`Parser` class constructor. + with the :class:``~email.parser.Parser` class constructor. .. versionchanged:: 2.2.2 The *strict* flag was added. @@ -175,7 +178,7 @@ Return a message object structure tree from an open file object. This is exactly equivalent to ``Parser().parse(fp)``. Optional *_class* and *strict* - are interpreted as with the :class:`Parser` class constructor. + are interpreted as with the :class:``~email.parser.Parser` class constructor. .. versionchanged:: 2.2.2 The *strict* flag was added. @@ -193,32 +196,35 @@ * Most non-\ :mimetype:`multipart` type messages are parsed as a single message object with a string payload. These objects will return ``False`` for - :meth:`is_multipart`. Their :meth:`get_payload` method will return a string - object. + :meth:`~email.message.Message.is_multipart`. Their + :meth:`~email.message.Message.get_payload` method will return a string object. * All :mimetype:`multipart` type messages will be parsed as a container message object with a list of sub-message objects for their payload. The outer - container message will return ``True`` for :meth:`is_multipart` and their - :meth:`get_payload` method will return the list of :class:`~email.message.Message` - subparts. + container message will return ``True`` for + :meth:`~email.message.Message.is_multipart` and their + :meth:`~email.message.Message.get_payload` method will return the list of + :class:`~email.message.Message` subparts. * Most messages with a content type of :mimetype:`message/\*` (e.g. :mimetype:`message/delivery-status` and :mimetype:`message/rfc822`) will also be parsed as container object containing a list payload of length 1. Their - :meth:`is_multipart` method will return ``True``. The single element in the - list payload will be a sub-message object. + :meth:`~email.message.Message.is_multipart` method will return ``True``. + The single element in the list payload will be a sub-message object. * Some non-standards compliant messages may not be internally consistent about their :mimetype:`multipart`\ -edness. Such messages may have a :mailheader:`Content-Type` header of type :mimetype:`multipart`, but their - :meth:`is_multipart` method may return ``False``. If such messages were parsed - with the :class:`FeedParser`, they will have an instance of the - :class:`MultipartInvariantViolationDefect` class in their *defects* attribute - list. See :mod:`email.errors` for details. + :meth:`~email.message.Message.is_multipart` method may return ``False``. + If such messages were parsed with the :class:`~email.parser.FeedParser`, + they will have an instance of the + :class:`~email.errors.MultipartInvariantViolationDefect` class in their + *defects* attribute list. See :mod:`email.errors` for details. .. rubric:: Footnotes .. [#] As of email package version 3.0, introduced in Python 2.4, the classic - :class:`Parser` was re-implemented in terms of the :class:`FeedParser`, so the - semantics and results are identical between the two parsers. 
+ :class:`~email.parser.Parser` was re-implemented in terms of the + :class:`~email.parser.FeedParser`, so the semantics and results are + identical between the two parsers. diff --git a/Doc/library/email.rst b/Doc/library/email.rst --- a/Doc/library/email.rst +++ b/Doc/library/email.rst @@ -112,14 +112,15 @@ *Note that the version 3 names will continue to work until Python 2.6*. * The :mod:`email.mime.application` module was added, which contains the - :class:`MIMEApplication` class. + :class:`~email.mime.application.MIMEApplication` class. * Methods that were deprecated in version 3 have been removed. These include :meth:`Generator.__call__`, :meth:`Message.get_type`, :meth:`Message.get_main_type`, :meth:`Message.get_subtype`. * Fixes have been added for :rfc:`2231` support which can change some of the - return types for :func:`Message.get_param` and friends. Under some + return types for :func:`Message.get_param ` + and friends. Under some circumstances, values which used to return a 3-tuple now return simple strings (specifically, if all extended parameter segments were unencoded, there is no language and charset designation expected, so the return type is now a simple @@ -128,23 +129,24 @@ Here are the major differences between :mod:`email` version 3 and version 2: -* The :class:`FeedParser` class was introduced, and the :class:`Parser` class - was implemented in terms of the :class:`FeedParser`. All parsing therefore is +* The :class:`~email.parser.FeedParser` class was introduced, and the + :class:`~email.parser.Parser` class was implemented in terms of the + :class:`~email.parser.FeedParser`. All parsing therefore is non-strict, and parsing will make a best effort never to raise an exception. Problems found while parsing messages are stored in the message's *defect* attribute. * All aspects of the API which raised :exc:`DeprecationWarning`\ s in version 2 have been removed. These include the *_encoder* argument to the - :class:`MIMEText` constructor, the :meth:`Message.add_payload` method, the - :func:`Utils.dump_address_pair` function, and the functions :func:`Utils.decode` - and :func:`Utils.encode`. + :class:`~email.mime.text.MIMEText` constructor, the + :meth:`Message.add_payload` method, the :func:`Utils.dump_address_pair` + function, and the functions :func:`Utils.decode` and :func:`Utils.encode`. * New :exc:`DeprecationWarning`\ s have been added to: :meth:`Generator.__call__`, :meth:`Message.get_type`, :meth:`Message.get_main_type`, :meth:`Message.get_subtype`, and the *strict* - argument to the :class:`Parser` class. These are expected to be removed in - future versions. + argument to the :class:`~email.parser.Parser` class. These are expected to + be removed in future versions. * Support for Pythons earlier than 2.3 has been removed. @@ -152,53 +154,61 @@ * The :mod:`email.Header` and :mod:`email.Charset` modules have been added. -* The pickle format for :class:`Message` instances has changed. Since this was - never (and still isn't) formally defined, this isn't considered a backward - incompatibility. However if your application pickles and unpickles - :class:`Message` instances, be aware that in :mod:`email` version 2, - :class:`Message` instances now have private variables *_charset* and - *_default_type*. +* The pickle format for :class:`~email.message.Message` instances has changed. + Since this was never (and still isn't) formally defined, this isn't + considered a backward incompatibility. 
However if your application pickles + and unpickles :class:`~email.message.Message` instances, be aware that in + :mod:`email` version 2, :class:`~email.message.Message` instances now have + private variables *_charset* and *_default_type*. -* Several methods in the :class:`Message` class have been deprecated, or their - signatures changed. Also, many new methods have been added. See the - documentation for the :class:`Message` class for details. The changes should be - completely backward compatible. +* Several methods in the :class:`~email.message.Message` class have been + deprecated, or their signatures changed. Also, many new methods have been + added. See the documentation for the :class:`~email.message.Message` class + for details. The changes should be completely backward compatible. * The object structure has changed in the face of :mimetype:`message/rfc822` - content types. In :mod:`email` version 1, such a type would be represented by a - scalar payload, i.e. the container message's :meth:`is_multipart` returned - false, :meth:`get_payload` was not a list object, but a single :class:`Message` - instance. + content types. In :mod:`email` version 1, such a type would be represented + by a scalar payload, i.e. the container message's + :meth:`~email.message.Message.is_multipart` returned false, + :meth:`~email.message.Message.get_payload` was not a list object, but a + single :class:`~email.message.Message` instance. This structure was inconsistent with the rest of the package, so the object representation for :mimetype:`message/rfc822` content types was changed. In :mod:`email` version 2, the container *does* return ``True`` from - :meth:`is_multipart`, and :meth:`get_payload` returns a list containing a single - :class:`Message` item. + :meth:`~email.message.Message.is_multipart`, and + :meth:`~email.message.Message.get_payload` returns a list containing a single + :class:`~email.message.Message` item. - Note that this is one place that backward compatibility could not be completely - maintained. However, if you're already testing the return type of - :meth:`get_payload`, you should be fine. You just need to make sure your code - doesn't do a :meth:`set_payload` with a :class:`Message` instance on a container - with a content type of :mimetype:`message/rfc822`. + Note that this is one place that backward compatibility could not be + completely maintained. However, if you're already testing the return type of + :meth:`~email.message.Message.get_payload`, you should be fine. You just need + to make sure your code doesn't do a :meth:`~email.message.Message.set_payload` + with a :class:`~email.message.Message` instance on a container with a content + type of :mimetype:`message/rfc822`. -* The :class:`Parser` constructor's *strict* argument was added, and its - :meth:`parse` and :meth:`parsestr` methods grew a *headersonly* argument. The - *strict* flag was also added to functions :func:`email.message_from_file` and - :func:`email.message_from_string`. +* The :class:`~email.parser.Parser` constructor's *strict* argument was added, + and its :meth:`~email.parser.Parser.parse` and + :meth:`~email.parser.Parser.parsestr` methods grew a *headersonly* argument. + The *strict* flag was also added to functions :func:`email.message_from_file` + and :func:`email.message_from_string`. -* :meth:`Generator.__call__` is deprecated; use :meth:`Generator.flatten` - instead. The :class:`Generator` class has also grown the :meth:`clone` method. 
+* :meth:`Generator.__call__` is deprecated; use :meth:`Generator.flatten + ` instead. The + :class:`~email.generator.Generator` class has also grown the + :meth:`~email.generator.Generator.clone` method. -* The :class:`DecodedGenerator` class in the :mod:`email.Generator` module was - added. +* The :class:`~email.generator.DecodedGenerator` class in the + :mod:`email.generator` module was added. -* The intermediate base classes :class:`MIMENonMultipart` and - :class:`MIMEMultipart` have been added, and interposed in the class hierarchy - for most of the other MIME-related derived classes. +* The intermediate base classes + :class:`~email.mime.nonmultipart.MIMENonMultipart` and + :class:`~email.mime.multipart.MIMEMultipart` have been added, and interposed + in the class hierarchy for most of the other MIME-related derived classes. -* The *_encoder* argument to the :class:`MIMEText` constructor has been - deprecated. Encoding now happens implicitly based on the *_charset* argument. +* The *_encoder* argument to the :class:`~email.mime.text.MIMEText` constructor + has been deprecated. Encoding now happens implicitly based on the + *_charset* argument. * The following functions in the :mod:`email.Utils` module have been deprecated: :func:`dump_address_pairs`, :func:`decode`, and :func:`encode`. The following @@ -231,17 +241,22 @@ * :func:`messageFromFile` has been renamed to :func:`message_from_file`. -The :class:`Message` class has the following differences: +The :class:`~email.message.Message` class has the following differences: -* The method :meth:`asString` was renamed to :meth:`as_string`. +* The method :meth:`asString` was renamed to + :meth:`~email.message.Message.as_string`. -* The method :meth:`ismultipart` was renamed to :meth:`is_multipart`. +* The method :meth:`ismultipart` was renamed to + :meth:`~email.message.Message.is_multipart`. -* The :meth:`get_payload` method has grown a *decode* optional argument. +* The :meth:`~email.message.Message.get_payload` method has grown a *decode* + optional argument. -* The method :meth:`getall` was renamed to :meth:`get_all`. +* The method :meth:`getall` was renamed to + :meth:`~email.message.Message.get_all`. -* The method :meth:`addheader` was renamed to :meth:`add_header`. +* The method :meth:`addheader` was renamed to + :meth:`~email.message.Message.add_header`. * The method :meth:`gettype` was renamed to :meth:`get_type`. @@ -249,48 +264,57 @@ * The method :meth:`getsubtype` was renamed to :meth:`get_subtype`. -* The method :meth:`getparams` was renamed to :meth:`get_params`. Also, whereas - :meth:`getparams` returned a list of strings, :meth:`get_params` returns a list - of 2-tuples, effectively the key/value pairs of the parameters, split on the - ``'='`` sign. +* The method :meth:`getparams` was renamed to + :meth:`~email.message.Message.get_params`. Also, whereas :meth:`getparams` + returned a list of strings, :meth:`~email.message.Message.get_params` returns + a list of 2-tuples, effectively the key/value pairs of the parameters, split + on the ``'='`` sign. -* The method :meth:`getparam` was renamed to :meth:`get_param`. +* The method :meth:`getparam` was renamed to + :meth:`~email.message.Message.get_param`. -* The method :meth:`getcharsets` was renamed to :meth:`get_charsets`. +* The method :meth:`getcharsets` was renamed to + :meth:`~email.message.Message.get_charsets`. -* The method :meth:`getfilename` was renamed to :meth:`get_filename`. 
+* The method :meth:`getfilename` was renamed to + :meth:`~email.message.Message.get_filename`. -* The method :meth:`getboundary` was renamed to :meth:`get_boundary`. +* The method :meth:`getboundary` was renamed to + :meth:`~email.message.Message.get_boundary`. -* The method :meth:`setboundary` was renamed to :meth:`set_boundary`. +* The method :meth:`setboundary` was renamed to + :meth:`~email.message.Message.set_boundary`. * The method :meth:`getdecodedpayload` was removed. To get similar - functionality, pass the value 1 to the *decode* flag of the get_payload() - method. + functionality, pass the value 1 to the *decode* flag of the + :meth:`~email.message.Message.get_payload` method. * The method :meth:`getpayloadastext` was removed. Similar functionality is - supported by the :class:`DecodedGenerator` class in the :mod:`email.generator` + supported by the :class:`~email.generator.DecodedGenerator` class in the + :mod:`email.generator` module. + +* The method :meth:`getbodyastext` was removed. You can get similar + functionality by creating an iterator with + :func:`~email.iterators.typed_subpart_iterator` in the :mod:`email.iterators` module. -* The method :meth:`getbodyastext` was removed. You can get similar - functionality by creating an iterator with :func:`typed_subpart_iterator` in the - :mod:`email.iterators` module. +The :class:`~email.parser.Parser` class has no differences in its public +interface. It does have some additional smarts to recognize +:mimetype:`message/delivery-status` type messages, which it represents as a +:class:`~email.message.Message` instance containing separate +:class:`~email.message.Message` subparts for each header block in the delivery +status notification [#]_. -The :class:`Parser` class has no differences in its public interface. It does -have some additional smarts to recognize :mimetype:`message/delivery-status` -type messages, which it represents as a :class:`Message` instance containing -separate :class:`Message` subparts for each header block in the delivery status -notification [#]_. - -The :class:`Generator` class has no differences in its public interface. There -is a new class in the :mod:`email.generator` module though, called -:class:`DecodedGenerator` which provides most of the functionality previously -available in the :meth:`Message.getpayloadastext` method. +The :class:`~email.generator.Generator` class has no differences in its public +interface. There is a new class in the :mod:`email.generator` module though, +called :class:`~email.generator.DecodedGenerator` which provides most of the +functionality previously available in the :meth:`Message.getpayloadastext` +method. The following modules and classes have been changed: -* The :class:`MIMEBase` class constructor arguments *_major* and *_minor* have - changed to *_maintype* and *_subtype* respectively. +* The :class:`~email.mime.base.MIMEBase` class constructor arguments *_major* + and *_minor* have changed to *_maintype* and *_subtype* respectively. * The ``Image`` class/module has been renamed to ``MIMEImage``. The *_minor* argument has been renamed to *_subtype*. @@ -303,7 +327,8 @@ but that clashed with the Python standard library module :mod:`rfc822` on some case-insensitive file systems. - Also, the :class:`MIMEMessage` class now represents any kind of MIME message + Also, the :class:`~email.mime.message.MIMEMessage` class now represents any + kind of MIME message with main type :mimetype:`message`. 
It takes an optional argument *_subtype* which is used to set the MIME subtype. *_subtype* defaults to :mimetype:`rfc822`. @@ -313,8 +338,8 @@ :mod:`email.utils` module. The ``MsgReader`` class/module has been removed. Its functionality is most -closely supported in the :func:`body_line_iterator` function in the -:mod:`email.iterators` module. +closely supported in the :func:`~email.iterators.body_line_iterator` function +in the :mod:`email.iterators` module. .. rubric:: Footnotes diff --git a/Doc/library/email.util.rst b/Doc/library/email.util.rst --- a/Doc/library/email.util.rst +++ b/Doc/library/email.util.rst @@ -41,8 +41,8 @@ This method returns a list of 2-tuples of the form returned by ``parseaddr()``. *fieldvalues* is a sequence of header field values as might be returned by - :meth:`Message.get_all`. Here's a simple example that gets all the recipients - of a message:: + :meth:`Message.get_all `. Here's a simple + example that gets all the recipients of a message:: from email.utils import getaddresses @@ -130,7 +130,8 @@ .. function:: collapse_rfc2231_value(value[, errors[, fallback_charset]]) When a header parameter is encoded in :rfc:`2231` format, - :meth:`Message.get_param` may return a 3-tuple containing the character set, + :meth:`Message.get_param ` may return a + 3-tuple containing the character set, language, and value. :func:`collapse_rfc2231_value` turns this into a unicode string. Optional *errors* is passed to the *errors* argument of the built-in :func:`unicode` function; it defaults to ``replace``. Optional @@ -152,11 +153,12 @@ .. versionchanged:: 2.4 The :func:`decode` function has been removed; use the - :meth:`Header.decode_header` method instead. + :meth:`Header.decode_header ` method + instead. .. versionchanged:: 2.4 - The :func:`encode` function has been removed; use the :meth:`Header.encode` - method instead. + The :func:`encode` function has been removed; use the :meth:`Header.encode + ` method instead. .. rubric:: Footnotes -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 16:39:09 2013 From: python-checkins at python.org (raymond.hettinger) Date: Mon, 19 Aug 2013 16:39:09 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue18771=3A__Reduce_the_?= =?utf-8?q?cost_of_hash_collisions_for_set_objects=2E?= Message-ID: <3cJd7K6YTFz7LjS@mail.python.org> http://hg.python.org/cpython/rev/a887596b841f changeset: 85262:a887596b841f parent: 85260:464eed5ddb2e user: Raymond Hettinger date: Mon Aug 19 07:36:04 2013 -0700 summary: Issue18771: Reduce the cost of hash collisions for set objects. files: Include/setobject.h | 2 +- Objects/setobject.c | 108 +++++++++++++++++++++++++------ 2 files changed, 88 insertions(+), 22 deletions(-) diff --git a/Include/setobject.h b/Include/setobject.h --- a/Include/setobject.h +++ b/Include/setobject.h @@ -51,9 +51,9 @@ */ setentry *table; setentry *(*lookup)(PySetObject *so, PyObject *key, Py_hash_t hash); + Py_hash_t hash; /* only used by frozenset objects */ setentry smalltable[PySet_MINSIZE]; - Py_hash_t hash; /* only used by frozenset objects */ PyObject *weakreflist; /* List of weak references */ }; #endif /* Py_LIMITED_API */ diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -68,6 +68,11 @@ The initial probe index is computed as hash mod the table size. Subsequent probe indices are computed as explained in Objects/dictobject.c. +To improve cache locality, each probe is done in pairs. 
+After the probe is examined, an adjacent entry is then examined as well. +The likelihood is that an adjacent entry is in the same cache line and +can be examined more cheaply than another probe elsewhere in memory. + All arithmetic on hash should ignore overflow. Unlike the dictionary implementation, the lookkey functions can return @@ -77,7 +82,7 @@ static setentry * set_lookkey(PySetObject *so, PyObject *key, Py_hash_t hash) { - size_t i; /* Unsigned for defined overflow behavior. */ + size_t i, j; /* Unsigned for defined overflow behavior. */ size_t perturb; setentry *freeslot; size_t mask = so->mask; @@ -90,7 +95,6 @@ entry = &table[i]; if (entry->key == NULL || entry->key == key) return entry; - if (entry->hash == hash) { startkey = entry->key; Py_INCREF(startkey); @@ -107,14 +111,15 @@ return set_lookkey(so, key, hash); } } - freeslot = (entry->key == dummy) ? entry : NULL; /* In the loop, key == dummy is by far (factor of 100s) the least likely outcome, so test for that last. */ - for (perturb = hash; ; perturb >>= PERTURB_SHIFT) { - i = i * 5 + perturb + 1; - entry = &table[i & mask]; + j = i; + perturb = hash; + while (1) { + j ^= 1; + entry = &table[j]; if (entry->key == NULL) { if (freeslot != NULL) entry = freeslot; @@ -134,14 +139,42 @@ break; } else { - /* The compare did major nasty stuff to the - * set: start over. - */ return set_lookkey(so, key, hash); } } if (entry->key == dummy && freeslot == NULL) freeslot = entry; + + i = i * 5 + perturb + 1; + j = i & mask; + perturb >>= PERTURB_SHIFT; + + entry = &table[j]; + if (entry->key == NULL) { + if (freeslot != NULL) + entry = freeslot; + break; + } + if (entry->key == key) + break; + if (entry->hash == hash) { + startkey = entry->key; + Py_INCREF(startkey); + cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); + Py_DECREF(startkey); + if (cmp < 0) + return NULL; + if (table == so->table && entry->key == startkey) { + if (cmp > 0) + break; + } + else { + return set_lookkey(so, key, hash); + } + } + if (entry->key == dummy && freeslot == NULL) + freeslot = entry; + } return entry; } @@ -154,7 +187,7 @@ static setentry * set_lookkey_unicode(PySetObject *so, PyObject *key, Py_hash_t hash) { - size_t i; /* Unsigned for defined overflow behavior. */ + size_t i, j; /* Unsigned for defined overflow behavior. */ size_t perturb; setentry *freeslot; size_t mask = so->mask; @@ -169,6 +202,7 @@ so->lookup = set_lookkey; return set_lookkey(so, key, hash); } + i = (size_t)hash & mask; entry = &table[i]; if (entry->key == NULL || entry->key == key) @@ -181,11 +215,37 @@ freeslot = NULL; } - /* In the loop, key == dummy is by far (factor of 100s) the - least likely outcome, so test for that last. */ - for (perturb = hash; ; perturb >>= PERTURB_SHIFT) { + entry = &table[i ^ 1]; + if (entry->key == NULL) + return freeslot == NULL ? entry : freeslot; + if (entry->key == key + || (entry->hash == hash + && entry->key != dummy + && unicode_eq(entry->key, key))) + return entry; + if (entry->key == dummy && freeslot == NULL) + freeslot = entry; + + j = i; + perturb = hash; + while (1) { + j ^= 1; + entry = &table[j]; + if (entry->key == NULL) + return freeslot == NULL ? 
entry : freeslot; + if (entry->key == key + || (entry->hash == hash + && entry->key != dummy + && unicode_eq(entry->key, key))) + return entry; + if (entry->key == dummy && freeslot == NULL) + freeslot = entry; + i = i * 5 + perturb + 1; - entry = &table[i & mask]; + j = i & mask; + perturb >>= PERTURB_SHIFT; + + entry = &table[j]; if (entry->key == NULL) return freeslot == NULL ? entry : freeslot; if (entry->key == key @@ -244,17 +304,23 @@ static void set_insert_clean(PySetObject *so, PyObject *key, Py_hash_t hash) { - size_t i; - size_t perturb; - size_t mask = (size_t)so->mask; setentry *table = so->table; setentry *entry; + size_t perturb = hash; + size_t mask = (size_t)so->mask; + size_t i, j; - i = (size_t)hash & mask; - entry = &table[i]; - for (perturb = hash; entry->key != NULL; perturb >>= PERTURB_SHIFT) { + i = j = (size_t)hash & mask; + while (1) { + entry = &table[j]; + if (entry->key == NULL) + break; + entry = &table[j ^ 1]; + if (entry->key == NULL) + break; i = i * 5 + perturb + 1; - entry = &table[i & mask]; + j = i & mask; + perturb >>= PERTURB_SHIFT; } so->fill++; entry->key = key; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 17:37:18 2013 From: python-checkins at python.org (christian.heimes) Date: Mon, 19 Aug 2013 17:37:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4Nzc3?= =?utf-8?q?=3A_The_ssl_module_now_uses_the_new_CRYPTO=5FTHREADID_API_of?= Message-ID: <3cJfQQ45t9z7LjX@mail.python.org> http://hg.python.org/cpython/rev/f967ded6f9dd changeset: 85263:f967ded6f9dd branch: 3.3 parent: 85259:7a1534dba050 user: Christian Heimes date: Mon Aug 19 17:36:29 2013 +0200 summary: Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. files: Misc/NEWS | 3 +++ Modules/_ssl.c | 18 +++++++++++++++++- 2 files changed, 20 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of + OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. + - Issue #18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -2610,9 +2610,21 @@ static PyThread_type_lock *_ssl_locks = NULL; -static unsigned long _ssl_thread_id_function (void) { +#if OPENSSL_VERSION_NUMBER >= 0x10000000 +/* use new CRYPTO_THREADID API. */ +static void +_ssl_threadid_callback(CRYPTO_THREADID *id) +{ + CRYPTO_THREADID_set_numeric(id, + (unsigned long)PyThread_get_thread_ident()); +} +#else +/* deprecated CRYPTO_set_id_callback() API. 
*/ +static unsigned long +_ssl_thread_id_function (void) { return PyThread_get_thread_ident(); } +#endif static void _ssl_thread_locking_function (int mode, int n, const char *file, int line) { @@ -2665,7 +2677,11 @@ } } CRYPTO_set_locking_callback(_ssl_thread_locking_function); +#if OPENSSL_VERSION_NUMBER >= 0x10000000 + CRYPTO_THREADID_set_callback(_ssl_threadid_callback); +#else CRYPTO_set_id_callback(_ssl_thread_id_function); +#endif } return 1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 17:37:19 2013 From: python-checkins at python.org (christian.heimes) Date: Mon, 19 Aug 2013 17:37:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318777=3A_The_ssl_module_now_uses_the_new_CRYPTO?= =?utf-8?q?=5FTHREADID_API_of?= Message-ID: <3cJfQR6Llgz7Ljs@mail.python.org> http://hg.python.org/cpython/rev/28e68f4807a2 changeset: 85264:28e68f4807a2 parent: 85262:a887596b841f parent: 85263:f967ded6f9dd user: Christian Heimes date: Mon Aug 19 17:36:39 2013 +0200 summary: Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. files: Misc/NEWS | 3 +++ Modules/_ssl.c | 18 +++++++++++++++++- 2 files changed, 20 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -34,6 +34,9 @@ Library ------- +- Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of + OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. + - Issue #18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -3136,9 +3136,21 @@ static PyThread_type_lock *_ssl_locks = NULL; -static unsigned long _ssl_thread_id_function (void) { +#if OPENSSL_VERSION_NUMBER >= 0x10000000 +/* use new CRYPTO_THREADID API. */ +static void +_ssl_threadid_callback(CRYPTO_THREADID *id) +{ + CRYPTO_THREADID_set_numeric(id, + (unsigned long)PyThread_get_thread_ident()); +} +#else +/* deprecated CRYPTO_set_id_callback() API. */ +static unsigned long +_ssl_thread_id_function (void) { return PyThread_get_thread_ident(); } +#endif static void _ssl_thread_locking_function (int mode, int n, const char *file, int line) { @@ -3191,7 +3203,11 @@ } } CRYPTO_set_locking_callback(_ssl_thread_locking_function); +#if OPENSSL_VERSION_NUMBER >= 0x10000000 + CRYPTO_THREADID_set_callback(_ssl_threadid_callback); +#else CRYPTO_set_id_callback(_ssl_thread_id_function); +#endif } return 1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 17:37:21 2013 From: python-checkins at python.org (christian.heimes) Date: Mon, 19 Aug 2013 17:37:21 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4Nzc3?= =?utf-8?q?=3A_The_ssl_module_now_uses_the_new_CRYPTO=5FTHREADID_API_of?= Message-ID: <3cJfQT1jwRz7Ljc@mail.python.org> http://hg.python.org/cpython/rev/5d691723bfbd changeset: 85265:5d691723bfbd branch: 2.7 parent: 85261:062533327ad2 user: Christian Heimes date: Mon Aug 19 17:36:29 2013 +0200 summary: Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. 
files: Misc/NEWS | 3 +++ Modules/_ssl.c | 18 +++++++++++++++++- 2 files changed, 20 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,9 @@ Library ------- +- Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of + OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. + - Issue #18768: Correct doc string of RAND_edg(). Patch by Vajrasky Kok. - Issue #18178: Fix ctypes on BSD. dlmalloc.c was compiled twice which broke diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -1649,9 +1649,21 @@ static PyThread_type_lock *_ssl_locks = NULL; -static unsigned long _ssl_thread_id_function (void) { +#if OPENSSL_VERSION_NUMBER >= 0x10000000 +/* use new CRYPTO_THREADID API. */ +static void +_ssl_threadid_callback(CRYPTO_THREADID *id) +{ + CRYPTO_THREADID_set_numeric(id, + (unsigned long)PyThread_get_thread_ident()); +} +#else +/* deprecated CRYPTO_set_id_callback() API. */ +static unsigned long +_ssl_thread_id_function (void) { return PyThread_get_thread_ident(); } +#endif static void _ssl_thread_locking_function (int mode, int n, const char *file, int line) { /* this function is needed to perform locking on shared data @@ -1702,7 +1714,11 @@ } } CRYPTO_set_locking_callback(_ssl_thread_locking_function); +#if OPENSSL_VERSION_NUMBER >= 0x10000000 + CRYPTO_THREADID_set_callback(_ssl_threadid_callback); +#else CRYPTO_set_id_callback(_ssl_thread_id_function); +#endif } return 1; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 18:12:29 2013 From: python-checkins at python.org (raymond.hettinger) Date: Mon, 19 Aug 2013 18:12:29 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_18774=3A__Update_new?= =?utf-8?q?s_and_whatsnew_for_the_set_optimizations?= Message-ID: <3cJgC1127Wz7LjS@mail.python.org> http://hg.python.org/cpython/rev/a02339bda413 changeset: 85266:a02339bda413 parent: 85264:28e68f4807a2 user: Raymond Hettinger date: Mon Aug 19 09:12:20 2013 -0700 summary: Issue 18774: Update news and whatsnew for the set optimizations files: Doc/whatsnew/3.4.rst | 6 ++++++ Misc/NEWS | 4 ++++ 2 files changed, 10 insertions(+), 0 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -389,6 +389,12 @@ * The UTF-32 decoder is now 3x to 4x faster. +* The cost of hash collisions for sets is now reduced. Each hash table + probe now checks a second key/hash pair for each cache line retrieved. + This exploits cache locality to make collision resolution less expensive. + + (Contributed by Raymond Hetting in :issue"`18771`.) + Build and C API Changes ======================= diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -12,6 +12,10 @@ - Issue #18774: Remove last bits of GNU PTH thread code and thread_pth.h. +- Issue #18771: Add optimization to set object lookups to reduce the cost + of hash collisions. The core idea is to inspect a second key/hash pair + for each cache line retrieved. + - Issue #16105: When a signal handler fails to write to the file descriptor registered with ``signal.set_wakeup_fd()``, report an exception instead of ignoring the error. 
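The probe sequence these two changesets describe (inspect a slot, then its neighbour in the same cache line, then jump to the next probe position) can be modelled in a few lines of Python. This is only an illustrative sketch, not the code the patch adds; the authoritative version is the C diff to Objects/setobject.c above, and the helper name probe_indices is invented here:

    from itertools import islice

    PERTURB_SHIFT = 5   # the shift CPython uses for dict/set probing

    def probe_indices(h, mask):
        """Yield table slots in the order the patched set lookup visits them."""
        perturb = h
        i = j = h & mask
        while True:
            yield j          # primary probe
            yield j ^ 1      # adjacent entry, likely in the same cache line
            i = i * 5 + perturb + 1   # same recurrence the old loop already used
            j = i & mask
            perturb >>= PERTURB_SHIFT

    # First eight slots inspected for an example hash in a 32-slot table:
    print(list(islice(probe_indices(0x9e3779b9, mask=31), 8)))

Only the interleaved j ^ 1 probes are new; the i * 5 + perturb + 1 recurrence is the pre-existing open-addressing scheme.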
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 18:30:20 2013 From: python-checkins at python.org (christian.heimes) Date: Mon, 19 Aug 2013 18:30:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_mention_digestmod?= Message-ID: <3cJgbc2X2lz7LjX@mail.python.org> http://hg.python.org/peps/rev/0cc8684e72ab changeset: 5064:0cc8684e72ab user: Christian Heimes date: Mon Aug 19 18:30:13 2013 +0200 summary: mention digestmod fix typo (thx to Francisco Martin Brugue) files: pep-0452.txt | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pep-0452.txt b/pep-0452.txt --- a/pep-0452.txt +++ b/pep-0452.txt @@ -26,7 +26,7 @@ Hash function modules define one function: new([string]) (unkeyed hashes) - new([key] , [string]) (keyed hashes) + new(key, [string], [digestmod]) (keyed hashes) Create a new hashing object and return it. The first form is for hashes that are unkeyed, such as MD5 or SHA. For keyed @@ -201,7 +201,7 @@ Code may depend on the fact that the argument is called 'string'. -Recommanded names for common hashing algorithms +Recommended names for common hashing algorithms algorithm variant recommended name ---------- --------- ---------------- -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Aug 19 19:07:28 2013 From: python-checkins at python.org (andrew.kuchling) Date: Mon, 19 Aug 2013 19:07:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Typo_fix?= Message-ID: <3cJhQS4QXrz7LjX@mail.python.org> http://hg.python.org/cpython/rev/4a318a45c4c3 changeset: 85267:4a318a45c4c3 user: Andrew Kuchling date: Mon Aug 19 13:07:18 2013 -0400 summary: Typo fix files: Doc/whatsnew/3.4.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -393,7 +393,7 @@ probe now checks a second key/hash pair for each cache line retrieved. This exploits cache locality to make collision resolution less expensive. - (Contributed by Raymond Hetting in :issue"`18771`.) + (Contributed by Raymond Hettinger in :issue"`18771`.) Build and C API Changes -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:21:17 2013 From: python-checkins at python.org (victor.stinner) Date: Mon, 19 Aug 2013 22:21:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_add_a_new_=22Only_?= =?utf-8?q?Inherit_Some_Handles_on_Windows=22_section?= Message-ID: <3cJmk55zgFz7Ljb@mail.python.org> http://hg.python.org/peps/rev/0ad2b188207c changeset: 5065:0ad2b188207c user: Victor Stinner date: Mon Aug 19 22:20:41 2013 +0200 summary: PEP 446: add a new "Only Inherit Some Handles on Windows" section files: pep-0446.txt | 53 ++++++++++++++++++++++++++++++++++++++- 1 files changed, 51 insertions(+), 2 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -83,12 +83,61 @@ * `Handle Inheritance `_ -* `Q315939: PRB: Child Inherits Unintended Handles During - CreateProcess Call `_ * `Stackoverflow: Can TCP SOCKET handles be set not inheritable? `_ +Only Inherit Some Handles on Windows +------------------------------------ + +Since Windows Vista, ``CreateProcess()`` supports an extension of the +STARTUPINFO struture: the `STARTUPINFOEX structure +`_. +Using this new structure, it is possible to specify a list of handles to +inherit: ``PROC_THREAD_ATTRIBUTE_HANDLE_LIST``. 
Read `Programmatically +controlling which handles are inherited by new processes in Win32 +`_ +(Raymond Chen, Dec 2011) for more information. + +Before Windows Vista, it is possible to make handles inheritable and +call ``CreateProcess()`` with ``bInheritHandles=TRUE``. This option +works if all other handles are non-inheritable. There is a race +condition: if another thread calls ``CreateProcess()`` with +``bInheritHandles=TRUE``, handles will also be inherited in the second +process. + +Microsoft suggests to use a lock to avoid the race condition: read +`Q315939: PRB: Child Inherits Unintended Handles During CreateProcess +Call `_ (last review: +November 2006). The `Python issue #16500 "Add an atfork module" +`_ proposes to add such lock, it can +be used to make handles non-inheritable without the race condition. Such +lock only protects against a race condition between Python threads, C +threads are not protected. + +Another option is to duplicate handles that must be inherited, pass the +number of the duplicated handles to the child process, so the child +process can steal duplicated handles using `DuplicateHandle() +`_ +with ``DUPLICATE_CLOSE_SOURCE``. Handle numbers change between the +parent and the child process because the handles are duplicated (twice), +the parent and/or the child process may be adapted to handle this +change. If the child program cannot be modified, an intermediate program +can be used to steal handles from the parent process before spawning the +final child program. The intermediate has to pass the handle of the +child process to the parent process. The parent may have to close +duplicated handles if all handles were not stolen, if the intermediate +process failed for example. If the command line is used to pass the +handle numbers, the command line must be modified when handle are +duplicated, because their number are modified. + +This PEP does not include a solution to this problem because there is no +perfect solution working on all Windows versions. This point is deferred +until use cases relying on handle or file descriptor inheritance on +Windows are well known to choose the best solution, and test carefully +the implementation. + + Inheritance of File Descriptors on UNIX --------------------------------------- -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Aug 19 22:21:19 2013 From: python-checkins at python.org (victor.stinner) Date: Mon, 19 Aug 2013 22:21:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_move_links_into_th?= =?utf-8?q?e_text_directly=2C_not_at_the_end?= Message-ID: <3cJmk70Sn8z7Ljc@mail.python.org> http://hg.python.org/peps/rev/ae76d5530e91 changeset: 5066:ae76d5530e91 user: Victor Stinner date: Mon Aug 19 22:21:02 2013 +0200 summary: PEP 446: move links into the text directly, not at the end files: pep-0446.txt | 35 +++++++++++++++++------------------ 1 files changed, 17 insertions(+), 18 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -215,6 +215,10 @@ * `Squid `_: open since 2012-07 +See also: `Excuse me son, but your code is leaking !!! +`_ (Dan Walsh, March 2012) +for SELinux issues with leaked file descriptors. 
+ Security Vulnerability ---------------------- @@ -347,6 +351,17 @@ creating atomically a non-inheritable socket * "X": not supported yet +See also: + +* `Secure File Descriptor Handling + `_ (Ulrich Drepper, + 2008) +* `Ghosts of Unix past, part 2: Conflated designs + `_ (Neil Brown, 2010) explains the + history of ``O_CLOEXEC`` and ``O_NONBLOCK`` flags +* `File descriptor handling changes in 2.6.27 + `_ + Status of Python 3.3 -------------------- @@ -588,10 +603,8 @@ ``os.dup2()`` to redict stdin, stdout and stderr. -Links -===== - -Python issues: +Python Issues +============= * `#10115: Support accept4() for atomic setting of flags at socket creation `_ @@ -611,20 +624,6 @@ * `#18571: Implementation of the PEP 446: non-inheritable file descriptors `_ -Other links: - -* `Secure File Descriptor Handling - `_ (Ulrich Drepper, - 2008) -* `Ghosts of Unix past, part 2: Conflated designs - `_ (Neil Brown, 2010) explains the - history of ``O_CLOEXEC`` and ``O_NONBLOCK`` flags -* `File descriptor handling changes in 2.6.27 - `_ -* `Excuse me son, but your code is leaking !!! - `_ (Dan Walsh, - March 2012): SELinux issues with leaked file descriptors - Copyright ========= -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Aug 19 22:30:25 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:25 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NjQ3?= =?utf-8?q?=3A_Correctly_bound_calculated_min/max_width_of_a_subexpression?= =?utf-8?q?=2E?= Message-ID: <3cJmwd3FTQzRhf@mail.python.org> http://hg.python.org/cpython/rev/de049e9abdf7 changeset: 85268:de049e9abdf7 branch: 3.3 parent: 85263:f967ded6f9dd user: Serhiy Storchaka date: Mon Aug 19 22:50:54 2013 +0300 summary: Issue #18647: Correctly bound calculated min/max width of a subexpression. Now max width is MAXREPEAT on 32- and 64-bit platforms when one of subexpressions is unbounded repetition. files: Lib/sre_parse.py | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py --- a/Lib/sre_parse.py +++ b/Lib/sre_parse.py @@ -148,7 +148,7 @@ REPEATCODES = (MIN_REPEAT, MAX_REPEAT) for op, av in self.data: if op is BRANCH: - i = sys.maxsize + i = MAXREPEAT - 1 j = 0 for av in av[1]: l, h = av.getwidth() @@ -166,14 +166,14 @@ hi = hi + j elif op in REPEATCODES: i, j = av[2].getwidth() - lo = lo + int(i) * av[0] - hi = hi + int(j) * av[1] + lo = lo + i * av[0] + hi = hi + j * av[1] elif op in UNITCODES: lo = lo + 1 hi = hi + 1 elif op == SUCCESS: break - self.width = int(min(lo, sys.maxsize)), int(min(hi, sys.maxsize)) + self.width = min(lo, MAXREPEAT - 1), min(hi, MAXREPEAT) return self.width class Tokenizer: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:30:26 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:26 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318647=3A_Correctly_bound_calculated_min/max_wid?= =?utf-8?q?th_of_a_subexpression=2E?= Message-ID: <3cJmwf59Y1zSrM@mail.python.org> http://hg.python.org/cpython/rev/e47f2dc564bc changeset: 85269:e47f2dc564bc parent: 85267:4a318a45c4c3 parent: 85268:de049e9abdf7 user: Serhiy Storchaka date: Mon Aug 19 22:53:04 2013 +0300 summary: Issue #18647: Correctly bound calculated min/max width of a subexpression. 
Now max width is MAXREPEAT on 32- and 64-bit platforms when one of subexpressions is unbounded repetition. files: Lib/sre_parse.py | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py --- a/Lib/sre_parse.py +++ b/Lib/sre_parse.py @@ -148,7 +148,7 @@ REPEATCODES = (MIN_REPEAT, MAX_REPEAT) for op, av in self.data: if op is BRANCH: - i = sys.maxsize + i = MAXREPEAT - 1 j = 0 for av in av[1]: l, h = av.getwidth() @@ -166,14 +166,14 @@ hi = hi + j elif op in REPEATCODES: i, j = av[2].getwidth() - lo = lo + int(i) * av[0] - hi = hi + int(j) * av[1] + lo = lo + i * av[0] + hi = hi + j * av[1] elif op in UNITCODES: lo = lo + 1 hi = hi + 1 elif op == SUCCESS: break - self.width = int(min(lo, sys.maxsize)), int(min(hi, sys.maxsize)) + self.width = min(lo, MAXREPEAT - 1), min(hi, MAXREPEAT) return self.width class Tokenizer: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:30:28 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:28 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NjQ3?= =?utf-8?q?=3A_Correctly_bound_calculated_min/max_width_of_a_subexpression?= =?utf-8?q?=2E?= Message-ID: <3cJmwh09Tyz7LjV@mail.python.org> http://hg.python.org/cpython/rev/d10c287c200c changeset: 85270:d10c287c200c branch: 2.7 parent: 85265:5d691723bfbd user: Serhiy Storchaka date: Mon Aug 19 22:53:46 2013 +0300 summary: Issue #18647: Correctly bound calculated min/max width of a subexpression. Now max width is MAXREPEAT on 32- and 64-bit platforms when one of subexpressions is unbounded repetition. files: Lib/sre_parse.py | 10 +++++----- 1 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py --- a/Lib/sre_parse.py +++ b/Lib/sre_parse.py @@ -142,12 +142,12 @@ # determine the width (min, max) for this subpattern if self.width: return self.width - lo = hi = 0L + lo = hi = 0 UNITCODES = (ANY, RANGE, IN, LITERAL, NOT_LITERAL, CATEGORY) REPEATCODES = (MIN_REPEAT, MAX_REPEAT) for op, av in self.data: if op is BRANCH: - i = sys.maxint + i = MAXREPEAT - 1 j = 0 for av in av[1]: l, h = av.getwidth() @@ -165,14 +165,14 @@ hi = hi + j elif op in REPEATCODES: i, j = av[2].getwidth() - lo = lo + long(i) * av[0] - hi = hi + long(j) * av[1] + lo = lo + i * av[0] + hi = hi + j * av[1] elif op in UNITCODES: lo = lo + 1 hi = hi + 1 elif op == SUCCESS: break - self.width = int(min(lo, sys.maxint)), int(min(hi, sys.maxint)) + self.width = min(lo, MAXREPEAT - 1), min(hi, MAXREPEAT) return self.width class Tokenizer: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:30:29 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:29 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NjQ3?= =?utf-8?q?=3A_A_regular_expression_in_the_doctest_module_rewritten_so_tha?= =?utf-8?q?t?= Message-ID: <3cJmwj23wDz7Ljc@mail.python.org> http://hg.python.org/cpython/rev/19ed2fbb8e6b changeset: 85271:19ed2fbb8e6b branch: 3.3 parent: 85268:de049e9abdf7 user: Serhiy Storchaka date: Mon Aug 19 22:59:31 2013 +0300 summary: Issue #18647: A regular expression in the doctest module rewritten so that determined minimal width of repeated subexpression is >0 (an empty line was not matched in any case). 
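Since the (?![ ]*$) lookahead already rejects blank lines, tightening .* to .+ does not change which lines the pattern accepts; it only gives the repeated subexpression a non-zero minimum width. A stripped-down check of the want-line alternative (illustrative only, not part of the patch):

    import re

    for body in (r'.*$\n?', r'.+$\n?'):      # before / after this change
        want_line = re.compile(r'(?![ ]*$)(?![ ]*>>>)' + body, re.MULTILINE)
        print(bool(want_line.match('some output\n')),   # True either way
              bool(want_line.match('   \n')))           # False either way: blank lines never matched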
files: Lib/doctest.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/doctest.py b/Lib/doctest.py --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -553,7 +553,7 @@ # Want consists of any non-blank lines that do not start with PS1. (?P (?:(?![ ]*$) # Not a blank line (?![ ]*>>>) # Not a line starting with PS1 - .*$\n? # But any other line + .+$\n? # But any other line )*) ''', re.MULTILINE | re.VERBOSE) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:30:30 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318647=3A_A_regular_expression_in_the_doctest_mo?= =?utf-8?q?dule_rewritten_so_that?= Message-ID: <3cJmwk3pyDz7Lk1@mail.python.org> http://hg.python.org/cpython/rev/8b24818c7327 changeset: 85272:8b24818c7327 parent: 85269:e47f2dc564bc parent: 85271:19ed2fbb8e6b user: Serhiy Storchaka date: Mon Aug 19 23:04:33 2013 +0300 summary: Issue #18647: A regular expression in the doctest module rewritten so that determined minimal width of repeated subexpression is >0 (an empty line was not matched in any case). files: Lib/doctest.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/doctest.py b/Lib/doctest.py --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -557,7 +557,7 @@ # Want consists of any non-blank lines that do not start with PS1. (?P (?:(?![ ]*$) # Not a blank line (?![ ]*>>>) # Not a line starting with PS1 - .*$\n? # But any other line + .+$\n? # But any other line )*) ''', re.MULTILINE | re.VERBOSE) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:30:31 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NjQ3?= =?utf-8?q?=3A_A_regular_expression_in_the_doctest_module_rewritten_so_tha?= =?utf-8?q?t?= Message-ID: <3cJmwl5k0Xz7Ljp@mail.python.org> http://hg.python.org/cpython/rev/c2dc99ec46bc changeset: 85273:c2dc99ec46bc branch: 2.7 parent: 85270:d10c287c200c user: Serhiy Storchaka date: Mon Aug 19 23:04:52 2013 +0300 summary: Issue #18647: A regular expression in the doctest module rewritten so that determined minimal width of repeated subexpression is >0 (an empty line was not matched in any case). files: Lib/doctest.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/doctest.py b/Lib/doctest.py --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -564,7 +564,7 @@ # Want consists of any non-blank lines that do not start with PS1. (?P (?:(?![ ]*$) # Not a blank line (?![ ]*>>>) # Not a line starting with PS1 - .*$\n? # But any other line + .+$\n? 
# But any other line )*) ''', re.MULTILINE | re.VERBOSE) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:30:33 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzI1Mzc6?= =?utf-8?q?_Remove_breaked_check_which_prevented_valid_regular_expressions?= =?utf-8?q?=2E?= Message-ID: <3cJmwn0lTSz7Lk9@mail.python.org> http://hg.python.org/cpython/rev/7ab07f15d78c changeset: 85274:7ab07f15d78c branch: 3.3 parent: 85271:19ed2fbb8e6b user: Serhiy Storchaka date: Mon Aug 19 23:18:23 2013 +0300 summary: Issue #2537: Remove breaked check which prevented valid regular expressions. Patch by Meador Inge. See also issue #18647. files: Lib/sre_compile.py | 2 -- Lib/test/test_re.py | 10 ++++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py --- a/Lib/sre_compile.py +++ b/Lib/sre_compile.py @@ -358,8 +358,6 @@ def _simple(av): # check if av is a "simple" operator lo, hi = av[2].getwidth() - #if lo == 0 and hi == MAXREPEAT: - # raise error("nothing to repeat") return lo == hi == 1 and av[2][0][0] != SUBPATTERN def _compile_info(code, pattern, flags): diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -1051,6 +1051,16 @@ [b'xyz'], msg=pattern) + def test_bug_2537(self): + # issue 2537: empty submatches + for outer_op in ('{0,}', '*', '+', '{1,187}'): + for inner_op in ('{0,}', '*', '?'): + r = re.compile("^((x|y)%s)%s" % (inner_op, outer_op)) + m = r.match("xyyzy") + self.assertEqual(m.group(0), "xyy") + self.assertEqual(m.group(1), "") + self.assertEqual(m.group(2), "y") + def run_re_tests(): from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR if verbose: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:30:34 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=232537=3A_Remove_breaked_check_which_prevented_va?= =?utf-8?q?lid_regular_expressions=2E?= Message-ID: <3cJmwp2pHmz7LjY@mail.python.org> http://hg.python.org/cpython/rev/f4271cc2dfb5 changeset: 85275:f4271cc2dfb5 parent: 85272:8b24818c7327 parent: 85274:7ab07f15d78c user: Serhiy Storchaka date: Mon Aug 19 23:19:49 2013 +0300 summary: Issue #2537: Remove breaked check which prevented valid regular expressions. Patch by Meador Inge. See also issue #18647. 
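Both halves of this cluster are easy to check by hand; the snippet below is only an illustrative check, not part of the patch (sre_parse is an internal, undocumented module, and the expected group values come from the test added in this changeset):

    import re
    import sre_parse
    from sre_constants import MAXREPEAT

    # Issue #18647: an unbounded repetition now reports MAXREPEAT as its
    # maximum width instead of a clamp at the platform's sys.maxsize.
    assert sre_parse.parse(r'(ab)+').getwidth() == (2, MAXREPEAT)

    # Issue #2537: empty submatches inside nested repeats, as exercised by
    # the new test_bug_2537 test.
    m = re.match(r'^((x|y)*)*', 'xyyzy')
    print(m.group(0), repr(m.group(1)), m.group(2))   # xyy '' y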
files: Lib/sre_compile.py | 2 -- Lib/test/test_re.py | 10 ++++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py --- a/Lib/sre_compile.py +++ b/Lib/sre_compile.py @@ -351,8 +351,6 @@ def _simple(av): # check if av is a "simple" operator lo, hi = av[2].getwidth() - #if lo == 0 and hi == MAXREPEAT: - # raise error("nothing to repeat") return lo == hi == 1 and av[2][0][0] != SUBPATTERN def _compile_info(code, pattern, flags): diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -1051,6 +1051,16 @@ [b'xyz'], msg=pattern) + def test_bug_2537(self): + # issue 2537: empty submatches + for outer_op in ('{0,}', '*', '+', '{1,187}'): + for inner_op in ('{0,}', '*', '?'): + r = re.compile("^((x|y)%s)%s" % (inner_op, outer_op)) + m = r.match("xyyzy") + self.assertEqual(m.group(0), "xyy") + self.assertEqual(m.group(1), "") + self.assertEqual(m.group(2), "y") + def run_re_tests(): from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR if verbose: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 22:30:35 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Mon, 19 Aug 2013 22:30:35 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzI1Mzc6?= =?utf-8?q?_Remove_breaked_check_which_prevented_valid_regular_expressions?= =?utf-8?q?=2E?= Message-ID: <3cJmwq5S36zSrM@mail.python.org> http://hg.python.org/cpython/rev/7b867a46a8b4 changeset: 85276:7b867a46a8b4 branch: 2.7 parent: 85273:c2dc99ec46bc user: Serhiy Storchaka date: Mon Aug 19 23:20:07 2013 +0300 summary: Issue #2537: Remove breaked check which prevented valid regular expressions. Patch by Meador Inge. See also issue #18647. files: Lib/sre_compile.py | 2 -- Lib/test/test_re.py | 10 ++++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py --- a/Lib/sre_compile.py +++ b/Lib/sre_compile.py @@ -355,8 +355,6 @@ def _simple(av): # check if av is a "simple" operator lo, hi = av[2].getwidth() - #if lo == 0 and hi == MAXREPEAT: - # raise error, "nothing to repeat" return lo == hi == 1 and av[2][0][0] != SUBPATTERN def _compile_info(code, pattern, flags): diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -908,6 +908,16 @@ [b'xyz'], msg=pattern) + def test_bug_2537(self): + # issue 2537: empty submatches + for outer_op in ('{0,}', '*', '+', '{1,187}'): + for inner_op in ('{0,}', '*', '?'): + r = re.compile("^((x|y)%s)%s" % (inner_op, outer_op)) + m = r.match("xyyzy") + self.assertEqual(m.group(0), "xyy") + self.assertEqual(m.group(1), "") + self.assertEqual(m.group(2), "y") + def run_re_tests(): from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR if verbose: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Aug 19 23:31:31 2013 From: python-checkins at python.org (antoine.pitrou) Date: Mon, 19 Aug 2013 23:31:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=237732=3A_try_to_fi?= =?utf-8?q?x_test=5Fbug7732=27s_flakiness_on_Windows_by_executing_it_in?= Message-ID: <3cJpH71LFzz7Ljb@mail.python.org> http://hg.python.org/cpython/rev/4f7845be9e23 changeset: 85277:4f7845be9e23 parent: 85275:f4271cc2dfb5 user: Antoine Pitrou date: Mon Aug 19 23:31:18 2013 +0200 summary: Issue #7732: try to fix test_bug7732's flakiness on Windows by executing it in a fresh temporary directory. 
files: Lib/test/test_imp.py | 8 +++----- 1 files changed, 3 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -278,13 +278,11 @@ @unittest.skipIf(sys.dont_write_bytecode, "test meaningful only when writing bytecode") def test_bug7732(self): - source = support.TESTFN + '.py' - os.mkdir(source) - try: + with support.temp_cwd(): + source = support.TESTFN + '.py' + os.mkdir(source) self.assertRaisesRegex(ImportError, '^No module', imp.find_module, support.TESTFN, ["."]) - finally: - os.rmdir(source) class ReloadTests(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Tue Aug 20 06:07:26 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 20 Aug 2013 06:07:26 +0200 Subject: [Python-checkins] Daily reference leaks (4f7845be9e23): sum=-1 Message-ID: results for 4f7845be9e23 on branch "default" -------------------------------------------- test_support leaked [0, 0, -1] references, sum=-1 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog1D0Nx0', '-x'] From ezio.melotti at gmail.com Tue Aug 20 17:58:15 2013 From: ezio.melotti at gmail.com (Ezio Melotti) Date: Tue, 20 Aug 2013 18:58:15 +0300 Subject: [Python-checkins] cpython (2.7): Issue #18592: Add docstrings to file being tested (idlelib.SearchDialogBase.py). In-Reply-To: <3cJCcM3NGgz7LjQ@mail.python.org> References: <3cJCcM3NGgz7LjQ@mail.python.org> Message-ID: Hi, On Mon, Aug 19, 2013 at 1:29 AM, terry.reedy wrote: > http://hg.python.org/cpython/rev/0d2b87ec9f2b > changeset: 85246:0d2b87ec9f2b > branch: 2.7 > parent: 85243:72c7a4cd4f55 > user: Terry Jan Reedy > date: Sun Aug 18 18:22:34 2013 -0400 > summary: > Issue #18592: Add docstrings to file being tested ( > idlelib.SearchDialogBase.py). > > files: > Lib/idlelib/SearchDialogBase.py | 17 +++++++++++++++++ > 1 files changed, 17 insertions(+), 0 deletions(-) > > > diff --git a/Lib/idlelib/SearchDialogBase.py > b/Lib/idlelib/SearchDialogBase.py > --- a/Lib/idlelib/SearchDialogBase.py > +++ b/Lib/idlelib/SearchDialogBase.py > @@ -1,6 +1,23 @@ > +'''Define SearchDialogBase used by Search, Replace, and Grep dialogs.''' > from Tkinter import * > > class SearchDialogBase: > + '''Create most of a modal search dialog (make_frame, create_widgets). > + > + The wide left column contains: > + 1 or 2 text entry lines (create_entries, make_entry); > + a row of standard radiobuttons (create_option_buttons); > + a row of dialog specific radiobuttons (create_other_buttons). > + > Should this be "radio buttons"? Best Regards, Ezio Melotti > + The narrow right column contains command buttons > + (create_command_buttons, make_button). > + These are bound to functions that execute the command. > + > + Except for command buttons, this base class is not limited to > + items common to all three subclasses. Rather, it is the Find dialog > + minus the "Find Next" command and its execution function. > + The other dialogs override methods to replace and add widgets. > + ''' > > title = "Search Dialog" > icon = "Search" > > -- > Repository URL: http://hg.python.org/cpython > > _______________________________________________ > Python-checkins mailing list > Python-checkins at python.org > http://mail.python.org/mailman/listinfo/python-checkins > > -------------- next part -------------- An HTML attachment was scrubbed... 
URL: From python-checkins at python.org Tue Aug 20 19:12:01 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Tue, 20 Aug 2013 19:12:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzEzNDYx?= =?utf-8?q?=3A_Fix_a_crash_in_the_TextIOWrapper=2Etell_method_on_64-bit_pl?= =?utf-8?q?atforms=2E?= Message-ID: <3cKJTF30rhz7Lk5@mail.python.org> http://hg.python.org/cpython/rev/826233404be8 changeset: 85278:826233404be8 branch: 3.3 parent: 85274:7ab07f15d78c user: Serhiy Storchaka date: Tue Aug 20 20:04:47 2013 +0300 summary: Issue #13461: Fix a crash in the TextIOWrapper.tell method on 64-bit platforms. Patch by Yogesh Chaudhari. files: Misc/NEWS | 3 +++ Modules/_io/textio.c | 2 +- 2 files changed, 4 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #13461: Fix a crash in the TextIOWrapper.tell method on 64-bit + platforms. Patch by Yogesh Chaudhari. + - Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -2370,7 +2370,7 @@ while (input < input_end) { Py_ssize_t n; - DECODER_DECODE(input, 1, n); + DECODER_DECODE(input, (Py_ssize_t)1, n); /* We got n chars for 1 byte */ chars_decoded += n; cookie.bytes_to_feed += 1; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 20 19:12:02 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Tue, 20 Aug 2013 19:12:02 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2313461=3A_Fix_a_crash_in_the_TextIOWrapper=2Etel?= =?utf-8?q?l_method_on_64-bit_platforms=2E?= Message-ID: <3cKJTG54WDz7Lk8@mail.python.org> http://hg.python.org/cpython/rev/6c9d49b8e3ec changeset: 85279:6c9d49b8e3ec parent: 85277:4f7845be9e23 parent: 85278:826233404be8 user: Serhiy Storchaka date: Tue Aug 20 20:07:50 2013 +0300 summary: Issue #13461: Fix a crash in the TextIOWrapper.tell method on 64-bit platforms. Patch by Yogesh Chaudhari. files: Misc/NEWS | 3 +++ Modules/_io/textio.c | 2 +- 2 files changed, 4 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -284,6 +284,9 @@ Library ------- +- Issue #13461: Fix a crash in the TextIOWrapper.tell method on 64-bit + platforms. Patch by Yogesh Chaudhari. + - Issue #18681: Fix a NameError in importlib.reload() (noticed by Weizhao Li). 
- Issue #14323: Expanded the number of digits in the coefficients for the diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -2368,7 +2368,7 @@ while (input < input_end) { Py_ssize_t n; - DECODER_DECODE(input, 1, n); + DECODER_DECODE(input, (Py_ssize_t)1, n); /* We got n chars for 1 byte */ chars_decoded += n; cookie.bytes_to_feed += 1; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 20 19:12:03 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Tue, 20 Aug 2013 19:12:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzEzNDYx?= =?utf-8?q?=3A_Fix_a_crash_in_the_TextIOWrapper=2Etell_method_and_in_the_?= =?utf-8?b?InJlcGxhY2Ui?= Message-ID: <3cKJTH75pVz7LkP@mail.python.org> http://hg.python.org/cpython/rev/5e679ef2a55c changeset: 85280:5e679ef2a55c branch: 2.7 parent: 85276:7b867a46a8b4 user: Serhiy Storchaka date: Tue Aug 20 20:08:53 2013 +0300 summary: Issue #13461: Fix a crash in the TextIOWrapper.tell method and in the "replace" error handler on 64-bit platforms. Patch by Yogesh Chaudhari. files: Misc/ACKS | 1 + Misc/NEWS | 6 ++++++ Modules/_io/textio.c | 2 +- Python/codecs.c | 2 +- 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -170,6 +170,7 @@ Mitch Chapman Greg Chapman Brad Chapman +Yogesh Chaudhari David Chaum Nicolas Chauvat Michael Chermside diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -9,6 +9,9 @@ Core and Builtins ----------------- +- Issue #13461: Fix a crash in the "replace" error handler on 64-bit platforms. + Patch by Yogesh Chaudhari. + - Issue #15866: The xmlcharrefreplace error handler no more produces two XML entities for a non-BMP character on narrow build. @@ -29,6 +32,9 @@ Library ------- +- Issue #13461: Fix a crash in the TextIOWrapper.tell method on 64-bit + platforms. Patch by Yogesh Chaudhari. + - Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. 
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -2271,7 +2271,7 @@ int dec_flags; PyObject *decoded = PyObject_CallMethod( - self->decoder, "decode", "s#", input, 1); + self->decoder, "decode", "s#", input, (Py_ssize_t)1); if (check_decoded(decoded) < 0) goto fail; chars_decoded += PyUnicode_GET_SIZE(decoded); diff --git a/Python/codecs.c b/Python/codecs.c --- a/Python/codecs.c +++ b/Python/codecs.c @@ -521,7 +521,7 @@ Py_UNICODE res = Py_UNICODE_REPLACEMENT_CHARACTER; if (PyUnicodeDecodeError_GetEnd(exc, &end)) return NULL; - return Py_BuildValue("(u#n)", &res, 1, end); + return Py_BuildValue("(u#n)", &res, (Py_ssize_t)1, end); } else if (PyObject_IsInstance(exc, PyExc_UnicodeTranslateError)) { PyObject *res; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 20 19:59:35 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Tue, 20 Aug 2013 19:59:35 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzg4NjU6?= =?utf-8?q?_Concurrent_invocation_of_select=2Epoll=2Epoll=28=29_now_raises?= =?utf-8?q?_a?= Message-ID: <3cKKX70CLPzRrM@mail.python.org> http://hg.python.org/cpython/rev/072ba5df77e4 changeset: 85281:072ba5df77e4 branch: 3.3 parent: 85278:826233404be8 user: Serhiy Storchaka date: Tue Aug 20 20:38:21 2013 +0300 summary: Issue #8865: Concurrent invocation of select.poll.poll() now raises a RuntimeError exception. Patch by Christian Schubert. files: Lib/test/test_poll.py | 42 ++++++++++++++++++++++++++++- Misc/ACKS | 1 + Misc/NEWS | 3 ++ Modules/selectmodule.c | 13 +++++++++ 4 files changed, 57 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py --- a/Lib/test/test_poll.py +++ b/Lib/test/test_poll.py @@ -1,8 +1,16 @@ # Test case for the os.poll() function -import os, select, random, unittest +import os +import random +import select import _testcapi -from test.support import TESTFN, run_unittest +try: + import threading +except ImportError: + threading = None +import time +import unittest +from test.support import TESTFN, run_unittest, reap_threads try: select.poll @@ -160,6 +168,36 @@ self.assertRaises(OverflowError, pollster.poll, _testcapi.INT_MAX + 1) self.assertRaises(OverflowError, pollster.poll, _testcapi.UINT_MAX + 1) + @unittest.skipUnless(threading, 'Threading required for this test.') + @reap_threads + def test_threaded_poll(self): + r, w = os.pipe() + self.addCleanup(os.close, r) + self.addCleanup(os.close, w) + rfds = [] + for i in range(10): + fd = os.dup(r) + self.addCleanup(os.close, fd) + rfds.append(fd) + pollster = select.poll() + for fd in rfds: + pollster.register(fd, select.POLLIN) + + t = threading.Thread(target=pollster.poll) + t.start() + try: + time.sleep(0.5) + # trigger ufds array reallocation + for fd in rfds: + pollster.unregister(fd) + pollster.register(w, select.POLLOUT) + self.assertRaises(RuntimeError, pollster.poll) + finally: + # and make the call to poll() from the thread return + os.write(w, b'spam') + t.join() + + def test_main(): run_unittest(PollTests) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1097,6 +1097,7 @@ Scott Schram Robin Schreiber Chad J. Schroeder +Christian Schubert Sam Schulenburg Stefan Schwarzer Dietmar Schwertberger diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #8865: Concurrent invocation of select.poll.poll() now raises a + RuntimeError exception. 
Patch by Christian Schubert. + - Issue #13461: Fix a crash in the TextIOWrapper.tell method on 64-bit platforms. Patch by Yogesh Chaudhari. diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -332,6 +332,7 @@ int ufd_uptodate; int ufd_len; struct pollfd *ufds; + int poll_running; } pollObject; static PyTypeObject poll_Type; @@ -528,16 +529,27 @@ return NULL; } + /* Avoid concurrent poll() invocation, issue 8865 */ + if (self->poll_running) { + PyErr_SetString(PyExc_RuntimeError, + "concurrent poll() invocation"); + return NULL; + } + /* Ensure the ufd array is up to date */ if (!self->ufd_uptodate) if (update_ufd_array(self) == 0) return NULL; + self->poll_running = 1; + /* call poll() */ Py_BEGIN_ALLOW_THREADS poll_result = poll(self->ufds, self->ufd_len, timeout); Py_END_ALLOW_THREADS + self->poll_running = 0; + if (poll_result < 0) { PyErr_SetFromErrno(PyExc_OSError); return NULL; @@ -614,6 +626,7 @@ array pointed to by ufds matches the contents of the dictionary. */ self->ufd_uptodate = 0; self->ufds = NULL; + self->poll_running = 0; self->dict = PyDict_New(); if (self->dict == NULL) { Py_DECREF(self); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 20 19:59:36 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Tue, 20 Aug 2013 19:59:36 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=238865=3A_Concurrent_invocation_of_select=2Epoll?= =?utf-8?q?=2Epoll=28=29_now_raises_a?= Message-ID: <3cKKX83MqGz7Ljv@mail.python.org> http://hg.python.org/cpython/rev/4543408e2ba6 changeset: 85282:4543408e2ba6 parent: 85279:6c9d49b8e3ec parent: 85281:072ba5df77e4 user: Serhiy Storchaka date: Tue Aug 20 20:50:32 2013 +0300 summary: Issue #8865: Concurrent invocation of select.poll.poll() now raises a RuntimeError exception. Patch by Christian Schubert. 
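In user terms the change means that a second thread which calls poll() while another call is still blocked now gets an immediate RuntimeError instead of racing on the shared ufds array. A rough sketch of the new behaviour on a POSIX system (timing-dependent, so treat it as an illustration rather than a reliable test):

    import os
    import select
    import threading
    import time

    r, w = os.pipe()
    pollster = select.poll()
    pollster.register(r, select.POLLIN)

    # Block in poll() on a worker thread; nothing has been written yet.
    t = threading.Thread(target=pollster.poll)
    t.start()
    time.sleep(0.5)              # give the worker time to enter poll()

    try:
        pollster.poll(0)         # concurrent invocation from the main thread
    except RuntimeError as exc:
        print('refused as expected:', exc)

    os.write(w, b'x')            # unblock the worker
    t.join()
    os.close(r)
    os.close(w)
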
files: Lib/test/test_poll.py | 42 ++++++++++++++++++++++++++++- Misc/ACKS | 1 + Misc/NEWS | 3 ++ Modules/selectmodule.c | 13 +++++++++ 4 files changed, 57 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py --- a/Lib/test/test_poll.py +++ b/Lib/test/test_poll.py @@ -1,8 +1,16 @@ # Test case for the os.poll() function -import os, select, random, unittest, subprocess +import os +import random +import select import _testcapi -from test.support import TESTFN, run_unittest +try: + import threading +except ImportError: + threading = None +import time +import unittest +from test.support import TESTFN, run_unittest, reap_threads try: select.poll @@ -161,6 +169,36 @@ self.assertRaises(OverflowError, pollster.poll, _testcapi.INT_MAX + 1) self.assertRaises(OverflowError, pollster.poll, _testcapi.UINT_MAX + 1) + @unittest.skipUnless(threading, 'Threading required for this test.') + @reap_threads + def test_threaded_poll(self): + r, w = os.pipe() + self.addCleanup(os.close, r) + self.addCleanup(os.close, w) + rfds = [] + for i in range(10): + fd = os.dup(r) + self.addCleanup(os.close, fd) + rfds.append(fd) + pollster = select.poll() + for fd in rfds: + pollster.register(fd, select.POLLIN) + + t = threading.Thread(target=pollster.poll) + t.start() + try: + time.sleep(0.5) + # trigger ufds array reallocation + for fd in rfds: + pollster.unregister(fd) + pollster.register(w, select.POLLOUT) + self.assertRaises(RuntimeError, pollster.poll) + finally: + # and make the call to poll() from the thread return + os.write(w, b'spam') + t.join() + + def test_main(): run_unittest(PollTests) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1131,6 +1131,7 @@ Scott Schram Robin Schreiber Chad J. Schroeder +Christian Schubert Sam Schulenburg Stefan Schwarzer Dietmar Schwertberger diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -38,6 +38,9 @@ Library ------- +- Issue #8865: Concurrent invocation of select.poll.poll() now raises a + RuntimeError exception. Patch by Christian Schubert. + - Issue #18777: The ssl module now uses the new CRYPTO_THREADID API of OpenSSL 1.0.0+ instead of the deprecated CRYPTO id callback function. diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -327,6 +327,7 @@ int ufd_uptodate; int ufd_len; struct pollfd *ufds; + int poll_running; } pollObject; static PyTypeObject poll_Type; @@ -523,16 +524,27 @@ return NULL; } + /* Avoid concurrent poll() invocation, issue 8865 */ + if (self->poll_running) { + PyErr_SetString(PyExc_RuntimeError, + "concurrent poll() invocation"); + return NULL; + } + /* Ensure the ufd array is up to date */ if (!self->ufd_uptodate) if (update_ufd_array(self) == 0) return NULL; + self->poll_running = 1; + /* call poll() */ Py_BEGIN_ALLOW_THREADS poll_result = poll(self->ufds, self->ufd_len, timeout); Py_END_ALLOW_THREADS + self->poll_running = 0; + if (poll_result < 0) { PyErr_SetFromErrno(PyExc_OSError); return NULL; @@ -609,6 +621,7 @@ array pointed to by ufds matches the contents of the dictionary. 
*/ self->ufd_uptodate = 0; self->ufds = NULL; + self->poll_running = 0; self->dict = PyDict_New(); if (self->dict == NULL) { Py_DECREF(self); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Aug 20 19:59:37 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Tue, 20 Aug 2013 19:59:37 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzg4NjU6?= =?utf-8?q?_Concurrent_invocation_of_select=2Epoll=2Epoll=28=29_now_raises?= =?utf-8?q?_a?= Message-ID: <3cKKX96ZJdz7Ljp@mail.python.org> http://hg.python.org/cpython/rev/a4091c1de27a changeset: 85283:a4091c1de27a branch: 2.7 parent: 85280:5e679ef2a55c user: Serhiy Storchaka date: Tue Aug 20 20:38:21 2013 +0300 summary: Issue #8865: Concurrent invocation of select.poll.poll() now raises a RuntimeError exception. Patch by Christian Schubert. files: Lib/test/test_poll.py | 42 ++++++++++++++++++++++++++++- Misc/ACKS | 1 + Misc/NEWS | 3 ++ Modules/selectmodule.c | 13 +++++++++ 4 files changed, 57 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py --- a/Lib/test/test_poll.py +++ b/Lib/test/test_poll.py @@ -1,8 +1,16 @@ # Test case for the os.poll() function -import os, select, random, unittest +import os +import random +import select import _testcapi -from test.test_support import TESTFN, run_unittest +try: + import threading +except ImportError: + threading = None +import time +import unittest +from test.test_support import TESTFN, run_unittest, reap_threads try: select.poll @@ -160,6 +168,36 @@ self.assertRaises(OverflowError, pollster.poll, _testcapi.INT_MAX + 1) self.assertRaises(OverflowError, pollster.poll, _testcapi.UINT_MAX + 1) + @unittest.skipUnless(threading, 'Threading required for this test.') + @reap_threads + def test_threaded_poll(self): + r, w = os.pipe() + self.addCleanup(os.close, r) + self.addCleanup(os.close, w) + rfds = [] + for i in range(10): + fd = os.dup(r) + self.addCleanup(os.close, fd) + rfds.append(fd) + pollster = select.poll() + for fd in rfds: + pollster.register(fd, select.POLLIN) + + t = threading.Thread(target=pollster.poll) + t.start() + try: + time.sleep(0.5) + # trigger ufds array reallocation + for fd in rfds: + pollster.unregister(fd) + pollster.register(w, select.POLLOUT) + self.assertRaises(RuntimeError, pollster.poll) + finally: + # and make the call to poll() from the thread return + os.write(w, b'spam') + t.join() + + def test_main(): run_unittest(PollTests) diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -912,6 +912,7 @@ Arvin Schnell Scott Schram Chad J. Schroeder +Christian Schubert Sam Schulenburg Stefan Schwarzer Dietmar Schwertberger diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -32,6 +32,9 @@ Library ------- +- Issue #8865: Concurrent invocation of select.poll.poll() now raises a + RuntimeError exception. Patch by Christian Schubert. + - Issue #13461: Fix a crash in the TextIOWrapper.tell method on 64-bit platforms. Patch by Yogesh Chaudhari. 
diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -318,6 +318,7 @@ int ufd_uptodate; int ufd_len; struct pollfd *ufds; + int poll_running; } pollObject; static PyTypeObject poll_Type; @@ -513,16 +514,27 @@ return NULL; } + /* Avoid concurrent poll() invocation, issue 8865 */ + if (self->poll_running) { + PyErr_SetString(PyExc_RuntimeError, + "concurrent poll() invocation"); + return NULL; + } + /* Ensure the ufd array is up to date */ if (!self->ufd_uptodate) if (update_ufd_array(self) == 0) return NULL; + self->poll_running = 1; + /* call poll() */ Py_BEGIN_ALLOW_THREADS poll_result = poll(self->ufds, self->ufd_len, timeout); Py_END_ALLOW_THREADS + self->poll_running = 0; + if (poll_result < 0) { PyErr_SetFromErrno(SelectError); return NULL; @@ -599,6 +611,7 @@ array pointed to by ufds matches the contents of the dictionary. */ self->ufd_uptodate = 0; self->ufds = NULL; + self->poll_running = 0; self->dict = PyDict_New(); if (self->dict == NULL) { Py_DECREF(self); -- Repository URL: http://hg.python.org/cpython From tjreedy at udel.edu Tue Aug 20 21:30:12 2013 From: tjreedy at udel.edu (Terry Reedy) Date: Tue, 20 Aug 2013 15:30:12 -0400 Subject: [Python-checkins] cpython (2.7): Issue #18592: Add docstrings to file being tested (idlelib.SearchDialogBase.py). In-Reply-To: References: <3cJCcM3NGgz7LjQ@mail.python.org> Message-ID: <5213C3C4.4040607@udel.edu> On 8/20/2013 11:58 AM, Ezio Melotti wrote: > On Mon, Aug 19, 2013 at 1:29 AM, terry.reedy Issue #18592: Add docstrings to file being tested > (idlelib.SearchDialogBase.py ). > class SearchDialogBase: > + '''Create most of a modal search dialog (make_frame, > create_widgets). > + > + The wide left column contains: > + 1 or 2 text entry lines (create_entries, make_entry); > + a row of standard radiobuttons (create_option_buttons); This is wrong -- these are Checkbuttons > + a row of dialog specific radiobuttons (create_other_buttons). dialog-specific Radiobuttons > Should this be "radio buttons"? I specifically meant instances of tkinter.Radiobutton. When a capitalized class name is also an English noun (Decimal, Fraction) we can get away with upper-casing or not when referring to instances. Do we have a style rule from when the class names are not? I added a note to the issue for the next edit. From python-checkins at python.org Tue Aug 20 21:39:08 2013 From: python-checkins at python.org (guido.van.rossum) Date: Tue, 20 Aug 2013 21:39:08 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?cGVwczogQWRkIHR1bGlwLndhaXRfZm9yKCku?= Message-ID: <3cKMl04vZBz7LjV@mail.python.org> http://hg.python.org/peps/rev/c3c09d565686 changeset: 5067:c3c09d565686 user: Guido van Rossum date: Tue Aug 20 12:38:58 2013 -0700 summary: Add tulip.wait_for(). files: pep-3156.txt | 6 ++++++ 1 files changed, 6 insertions(+), 0 deletions(-) diff --git a/pep-3156.txt b/pep-3156.txt --- a/pep-3156.txt +++ b/pep-3156.txt @@ -1329,6 +1329,12 @@ your ``for`` loop may not make progress (since you are not allowing other tasks to run). +- ``tulip.wait_for(f, timeout)``. This is a convenience to wait for a + single coroutine or Future with a timeout. It is a simple wrapper + around ``tulip.wait()`` with a single item in the first argument, + returning the result or raising the exception if it is completed + within the timeout, raising ``TimeoutError`` otherwise. 
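Tulip was the reference implementation that became asyncio, where wait_for() kept essentially the semantics described in the paragraph above. A small sketch using the asyncio spelling (modern syntax; PEP-era code would use yield from instead of await):

    import asyncio

    async def slow_job():
        await asyncio.sleep(10)
        return 'finished'

    async def main():
        try:
            # Wrap a single coroutine with a deadline: on success the result
            # is returned directly, on timeout TimeoutError is raised.
            result = await asyncio.wait_for(slow_job(), timeout=0.1)
        except asyncio.TimeoutError:
            result = 'timed out'
        print(result)

    asyncio.run(main())
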
+ Sleeping -------- -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Aug 20 22:04:01 2013 From: python-checkins at python.org (terry.reedy) Date: Tue, 20 Aug 2013 22:04:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP5=3A_Double_word_reported_?= =?utf-8?q?by_Aseem_Bansal_on_python-list=2E?= Message-ID: <3cKNHj0jQqz7LlM@mail.python.org> http://hg.python.org/peps/rev/af47d5be4850 changeset: 5068:af47d5be4850 user: Terry Jan Reedy date: Tue Aug 20 16:03:37 2013 -0400 summary: PEP5: Double word reported by Aseem Bansal on python-list. files: pep-0005.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0005.txt b/pep-0005.txt --- a/pep-0005.txt +++ b/pep-0005.txt @@ -59,7 +59,7 @@ 3. Formally deprecate the obsolete construct in the Python documentation. - 4. Add an an optional warning mode to the parser that will inform + 4. Add an optional warning mode to the parser that will inform users when the deprecated construct is used. In other words, all programs that will behave differently in the future must trigger warnings in this mode. Compile-time warnings are -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Aug 20 22:09:56 2013 From: python-checkins at python.org (christian.heimes) Date: Tue, 20 Aug 2013 22:09:56 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fix_rev85282=2C_add_missin?= =?utf-8?q?g_import_of_subprocess_module?= Message-ID: <3cKNQX2V8sz7Ljv@mail.python.org> http://hg.python.org/cpython/rev/afb1b4797419 changeset: 85284:afb1b4797419 parent: 85282:4543408e2ba6 user: Christian Heimes date: Tue Aug 20 22:09:41 2013 +0200 summary: Fix rev85282, add missing import of subprocess module files: Lib/test/test_poll.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py --- a/Lib/test/test_poll.py +++ b/Lib/test/test_poll.py @@ -1,6 +1,7 @@ # Test case for the os.poll() function import os +import subprocess import random import select import _testcapi -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 01:04:19 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 21 Aug 2013 01:04:19 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_update_os=2Edup2?= =?utf-8?b?KCk=?= Message-ID: <3cKSHl6zTBz7Ll6@mail.python.org> http://hg.python.org/peps/rev/b72b8b295eb9 changeset: 5069:b72b8b295eb9 user: Victor Stinner date: Wed Aug 21 01:04:11 2013 +0200 summary: PEP 446: update os.dup2() files: pep-0446.txt | 28 +++------------------------- 1 files changed, 3 insertions(+), 25 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -472,7 +472,6 @@ * ``io.open()`` * ``open()`` * ``os.dup()`` - * ``os.dup2()`` * ``os.fdopen()`` * ``os.open()`` * ``os.openpty()`` @@ -513,8 +512,9 @@ process after the ``fork()`` and before ``execv()``, so the inheritable flag of file descriptors is unchanged in the parent process. -* ``os.dup2(fd, fd2)`` makes *fd2* inheritable if *fd2* is ``0`` - (stdin), ``1`` (stdout) or ``2`` (stderr). +* ``os.dup2()`` has a new optional *inheritable* parameter: + ``os.dup2(fd, fd2, inheritable=True)``. *fd2* is created inheritable + by default, but non-inheritable if *inheritable* is ``False``. Since Python should only create non-inheritable file descriptors, it is safe to use subprocess with the *close_fds* parameter set to ``False``. @@ -581,28 +581,6 @@ consensus could be reached. 
-No special case for standard streams ------------------------------------- - -Functions handling file descriptors should not handle standard streams -(file descriptors ``0``, ``1``, ``2``) differently. - -This option does not work on Windows. On Windows, calling -``SetHandleInformation()`` to set or clear ``HANDLE_FLAG_INHERIT`` flag -on standard streams (0, 1, 2) fails with the Windows error 87 (invalid -argument). If ``os.dup2(fd, fd2)`` would always make *fd2* -non-inheritable, the function would raise an exception when used to -redirect standard streams. - -Another option is to add a new *inheritable* parameter to ``os.dup2()``. - -This PEP has a special-case for ``os.dup2()`` to not break backward -compatibility on applications redirecting standard streams before -calling the C function ``execv()``. Examples in the Python standard -library: ``CGIHTTPRequestHandler.run_cgi()`` and ``pty.fork()`` use -``os.dup2()`` to redict stdin, stdout and stderr. - - Python Issues ============= -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Aug 21 01:37:36 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 21 Aug 2013 01:37:36 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_add_os=2Eget/set?= =?utf-8?q?=5Fhandle=5Finheritable=28=29_functions?= Message-ID: <3cKT280HgVz7Ljp@mail.python.org> http://hg.python.org/peps/rev/cf1b78227280 changeset: 5070:cf1b78227280 user: Victor Stinner date: Wed Aug 21 01:37:23 2013 +0200 summary: PEP 446: add os.get/set_handle_inheritable() functions files: pep-0446.txt | 14 +++++++++++--- 1 files changed, 11 insertions(+), 3 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -493,15 +493,23 @@ New Functions ------------- +New functions available on all platforms: + * ``os.get_inheritable(fd: int)``: return ``True`` if the file descriptor can be inherited by child processes, ``False`` otherwise. * ``os.set_inheritable(fd: int, inheritable: bool)``: clear or set the inheritable flag of the specified file descriptor. -These new functions are available on all platforms. +New functions only available on Windows: -On Windows, these functions accept also file descriptors of sockets: -the result of ``sockobj.fileno()``. +* ``os.get_handle_inheritable(handle: int)``: return ``True`` if the + handle can be inherited by child processes, ``False`` otherwise. +* ``os.set_handle_inheritable(handle: int, inheritable: bool)``: clear + or set the inheritable flag of the specified handle. + +The ``fileno()`` method of a socket returns a file descriptor on UNIX, +whereas it returns a handle on Windows. So a different function must be +used depending on the platform to manage the inheritable of a socket. 
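Taken together, the PEP 446 changes above amount to a small portable API for controlling descriptor inheritance from Python. A brief sketch of how it reads, assuming the API as it eventually shipped in Python 3.4:

    import os

    r, w = os.pipe()                  # new descriptors are non-inheritable
    print(os.get_inheritable(r))      # False under PEP 446

    os.set_inheritable(r, True)       # opt in explicitly before exec()/spawn
    print(os.get_inheritable(r))      # True

    # dup2() is the exception: the target defaults to inheritable, so that
    # redirecting stdin/stdout/stderr before exec() keeps working.
    os.dup2(r, 10)
    print(os.get_inheritable(10))     # True

    os.close(10)
    os.close(r)
    os.close(w)
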
Other Changes -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Aug 21 02:42:01 2013 From: python-checkins at python.org (victor.stinner) Date: Wed, 21 Aug 2013 02:42:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_446=3A_add_get/set=5Finhe?= =?utf-8?q?ritable=28=29_methods_to_socket=2Esocket?= Message-ID: <3cKVST5g6bz7Ljj@mail.python.org> http://hg.python.org/peps/rev/917946bdd8ad changeset: 5071:917946bdd8ad user: Victor Stinner date: Wed Aug 21 02:41:49 2013 +0200 summary: PEP 446: add get/set_inheritable() methods to socket.socket files: pep-0446.txt | 13 ++++++++----- 1 files changed, 8 insertions(+), 5 deletions(-) diff --git a/pep-0446.txt b/pep-0446.txt --- a/pep-0446.txt +++ b/pep-0446.txt @@ -490,8 +490,8 @@ required when atomic flags are not available. -New Functions -------------- +New Functions And Methods +------------------------- New functions available on all platforms: @@ -507,9 +507,12 @@ * ``os.set_handle_inheritable(handle: int, inheritable: bool)``: clear or set the inheritable flag of the specified handle. -The ``fileno()`` method of a socket returns a file descriptor on UNIX, -whereas it returns a handle on Windows. So a different function must be -used depending on the platform to manage the inheritable of a socket. +New methods: + +* ``socket.socket.get_inheritable()``: return ``True`` if the + socket can be inherited by child processes, ``False`` otherwise. +* ``socket.socket.set_inheritable(inheritable: bool)``: clear + or set the inheritable flag of the specified socket. Other Changes -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Aug 21 02:55:31 2013 From: python-checkins at python.org (barry.warsaw) Date: Wed, 21 Aug 2013 02:55:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E6=29=3A_Fix_UnboundLoc?= =?utf-8?q?alError_regression_due_to_previous_incorrect_fix_for?= Message-ID: <3cKVm30m9Zz7LkX@mail.python.org> http://hg.python.org/cpython/rev/84f40562669f changeset: 85285:84f40562669f branch: 2.6 parent: 82288:936621d33c38 user: Barry Warsaw date: Tue Aug 20 20:35:20 2013 -0400 summary: Fix UnboundLocalError regression due to previous incorrect fix for issue 16248. files: Lib/lib-tk/Tkinter.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/lib-tk/Tkinter.py b/Lib/lib-tk/Tkinter.py --- a/Lib/lib-tk/Tkinter.py +++ b/Lib/lib-tk/Tkinter.py @@ -1634,7 +1634,7 @@ # ensure that self.tk is always _something_. self.tk = None if baseName is None: - import sys, os + import os baseName = os.path.basename(sys.argv[0]) baseName, ext = os.path.splitext(baseName) if ext not in ('.py', '.pyc', '.pyo'): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 02:55:32 2013 From: python-checkins at python.org (barry.warsaw) Date: Wed, 21 Aug 2013 02:55:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMi42IC0+IDIuNyk6?= =?utf-8?q?_Null_merge=2E?= Message-ID: <3cKVm42c1gz7Ll7@mail.python.org> http://hg.python.org/cpython/rev/da5b216916a3 changeset: 85286:da5b216916a3 branch: 2.7 parent: 85283:a4091c1de27a parent: 85285:84f40562669f user: Barry Warsaw date: Tue Aug 20 20:55:06 2013 -0400 summary: Null merge. 
files: -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Wed Aug 21 06:10:20 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 21 Aug 2013 06:10:20 +0200 Subject: [Python-checkins] Daily reference leaks (afb1b4797419): sum=2 Message-ID: results for afb1b4797419 on branch "default" -------------------------------------------- test_support leaked [0, -1, 1] references, sum=0 test_support leaked [0, -1, 3] memory blocks, sum=2 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogJ2r_eq', '-x'] From python-checkins at python.org Wed Aug 21 07:28:41 2013 From: python-checkins at python.org (raymond.hettinger) Date: Wed, 21 Aug 2013 07:28:41 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_18772=3A__Restore_se?= =?utf-8?q?t_dummy_object_back_to_unicode_and_restore_the_identity?= Message-ID: <3cKcqF5Rk3z7Ljt@mail.python.org> http://hg.python.org/cpython/rev/be29efa60b68 changeset: 85287:be29efa60b68 parent: 85284:afb1b4797419 user: Raymond Hettinger date: Tue Aug 20 22:28:24 2013 -0700 summary: Issue 18772: Restore set dummy object back to unicode and restore the identity checks in lookkey(). The Gdb prettyprint plugin depended on the dummy object being displayable. Other solutions besides a unicode object are possible. For now, get it back up and running. The identity checks in lookkey() need to be there to prevent the dummy object from leaking through Py_RichCompareBool() into user code in the rare circumstance where the dummy's hash value exactly matches the hash value of the actual key being looked up. files: Objects/setobject.c | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -95,7 +95,7 @@ entry = &table[i]; if (entry->key == NULL || entry->key == key) return entry; - if (entry->hash == hash) { + if (entry->hash == hash && entry->key != dummy) { startkey = entry->key; Py_INCREF(startkey); cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); @@ -127,7 +127,7 @@ } if (entry->key == key) break; - if (entry->hash == hash) { + if (entry->hash == hash && entry->key != dummy) { startkey = entry->key; Py_INCREF(startkey); cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); @@ -157,7 +157,7 @@ } if (entry->key == key) break; - if (entry->hash == hash) { + if (entry->hash == hash && entry->key != dummy) { startkey = entry->key; Py_INCREF(startkey); cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); @@ -1090,7 +1090,7 @@ PySetObject *so = NULL; if (dummy == NULL) { /* Auto-initialize dummy */ - dummy = _PyObject_New(&PyBaseObject_Type); + dummy = PyUnicode_FromString(""); if (dummy == NULL) return NULL; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 08:03:36 2013 From: python-checkins at python.org (raymond.hettinger) Date: Wed, 21 Aug 2013 08:03:36 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Remove_a_redundant_hash_ta?= =?utf-8?q?ble_probe_=28this_was_artifact_from_an_earlier_draft_of?= Message-ID: <3cKdbX5vFgz7Ljj@mail.python.org> http://hg.python.org/cpython/rev/d7713bfa680a changeset: 85288:d7713bfa680a user: Raymond Hettinger date: Tue Aug 20 23:03:28 2013 -0700 summary: Remove a redundant hash table probe (this was artifact from an earlier draft of the patch). 
files: Objects/setobject.c | 11 ----------- 1 files changed, 0 insertions(+), 11 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -215,17 +215,6 @@ freeslot = NULL; } - entry = &table[i ^ 1]; - if (entry->key == NULL) - return freeslot == NULL ? entry : freeslot; - if (entry->key == key - || (entry->hash == hash - && entry->key != dummy - && unicode_eq(entry->key, key))) - return entry; - if (entry->key == dummy && freeslot == NULL) - freeslot = entry; - j = i; perturb = hash; while (1) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 10:35:09 2013 From: python-checkins at python.org (raymond.hettinger) Date: Wed, 21 Aug 2013 10:35:09 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Hoist_the_global_dummy_loo?= =?utf-8?q?kup_out_of_the_inner_loop_for_set=5Fmerge=28=29=2E?= Message-ID: <3cKhyP3x8mz7LmV@mail.python.org> http://hg.python.org/cpython/rev/4e79c3ae8a12 changeset: 85289:4e79c3ae8a12 user: Raymond Hettinger date: Wed Aug 21 01:34:18 2013 -0700 summary: Hoist the global dummy lookup out of the inner loop for set_merge(). files: Objects/setobject.c | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -683,6 +683,7 @@ { PySetObject *other; PyObject *key; + PyObject *dummy_entry; Py_hash_t hash; Py_ssize_t i; setentry *entry; @@ -702,12 +703,13 @@ if (set_table_resize(so, (so->used + other->used)*2) != 0) return -1; } + dummy_entry = dummy; for (i = 0; i <= other->mask; i++) { entry = &other->table[i]; key = entry->key; hash = entry->hash; if (key != NULL && - key != dummy) { + key != dummy_entry) { Py_INCREF(key); if (set_insert_key(so, key, hash) == -1) { Py_DECREF(key); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 13:43:30 2013 From: python-checkins at python.org (christian.heimes) Date: Wed, 21 Aug 2013 13:43:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzQ3?= =?utf-8?q?=3A_Re-seed_OpenSSL=27s_pseudo-random_number_generator_after_fo?= =?utf-8?b?cmsu?= Message-ID: <3cKn7k077mz7Lmg@mail.python.org> http://hg.python.org/cpython/rev/8e1194c39bed changeset: 85290:8e1194c39bed branch: 3.3 parent: 85281:072ba5df77e4 user: Christian Heimes date: Wed Aug 21 13:26:05 2013 +0200 summary: Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. A pthread_atfork() child handler is used to seeded the PRNG with pid, time and some stack data. 
files: Lib/test/test_ssl.py | 32 ++++++++++++++ Misc/NEWS | 4 + Modules/_ssl.c | 72 ++++++++++++++++++++++++++++++++ configure | 11 ++++ configure.ac | 1 + pyconfig.h.in | 3 + 6 files changed, 123 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -130,6 +130,38 @@ self.assertRaises(TypeError, ssl.RAND_egd, 'foo', 1) ssl.RAND_add("this is a random string", 75.0) + @unittest.skipUnless(os.name == 'posix', 'requires posix') + def test_random_fork(self): + status = ssl.RAND_status() + if not status: + self.fail("OpenSSL's PRNG has insufficient randomness") + + rfd, wfd = os.pipe() + pid = os.fork() + if pid == 0: + try: + os.close(rfd) + child_random = ssl.RAND_pseudo_bytes(16)[0] + self.assertEqual(len(child_random), 16) + os.write(wfd, child_random) + os.close(wfd) + except BaseException: + os._exit(1) + else: + os._exit(0) + else: + os.close(wfd) + self.addCleanup(os.close, rfd) + _, status = os.waitpid(pid, 0) + self.assertEqual(status, 0) + + child_random = os.read(rfd, 16) + self.assertEqual(len(child_random), 16) + parent_random = ssl.RAND_pseudo_bytes(16)[0] + self.assertEqual(len(parent_random), 16) + + self.assertNotEqual(child_random, parent_random) + def test_parse_cert(self): # note that this uses an 'unofficial' function in _ssl.c, # provided solely for this test, to exercise the certificate diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,10 @@ Library ------- +- Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. + A pthread_atfork() child handler is used to seeded the PRNG with pid, time + and some stack data. + - Issue #8865: Concurrent invocation of select.poll.poll() now raises a RuntimeError exception. Patch by Christian Schubert. diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -18,6 +18,11 @@ #ifdef WITH_THREAD #include "pythread.h" + +#ifdef HAVE_PTHREAD_ATFORK +# include +#endif + #define PySSL_BEGIN_ALLOW_THREADS_S(save) \ do { if (_ssl_locks_count>0) { (save) = PyEval_SaveThread(); } } while (0) #define PySSL_END_ALLOW_THREADS_S(save) \ @@ -2578,7 +2583,69 @@ Returns number of bytes read. Raises SSLError if connection to EGD\n\ fails or if it does not provide enough data to seed PRNG."); +/* Seed OpenSSL's PRNG at fork(), http://bugs.python.org/issue18747 + * + * The child handler seeds the PRNG from pseudo-random data like pid, the + * current time (nanoseconds, miliseconds or seconds) and an uninitialized + * array. The array contains stack variables that are impossible to predict + * on most systems, e.g. function return address (subject to ASLR), the + * stack protection canary and automatic variables. + * The code is inspired by Apache's ssl_rand_seed() function. + * + * Note: + * The code uses pthread_atfork() until Python has a proper atfork API. The + * handlers are not removed from the child process. + */ + +#if defined(HAVE_PTHREAD_ATFORK) && defined(WITH_THREAD) +#define PYSSL_RAND_ATFORK 1 + +static void +PySSL_RAND_atfork_child(void) +{ + struct { + char stack[128]; /* uninitialized (!) stack data, 128 is an + arbitrary number. 
*/ + pid_t pid; /* current pid */ + _PyTime_timeval tp; /* current time */ + } seed; + +#ifdef WITH_VALGRIND + VALGRIND_MAKE_MEM_DEFINED(seed.stack, sizeof(seed.stack)); #endif + seed.pid = getpid(); + _PyTime_gettimeofday(&(seed.tp)); + +#if 0 + fprintf(stderr, "PySSL_RAND_atfork_child() seeds %i bytes in pid %i\n", + (int)sizeof(seed), seed.pid); +#endif + RAND_add((unsigned char *)&seed, sizeof(seed), 0.0); +} + +static int +PySSL_RAND_atfork(void) +{ + static int registered = 0; + int retval; + + if (registered) + return 0; + + retval = pthread_atfork(NULL, /* prepare */ + NULL, /* parent */ + PySSL_RAND_atfork_child); /* child */ + if (retval != 0) { + PyErr_SetFromErrno(PyExc_OSError); + return -1; + } + registered = 1; + return 0; +} +#endif /* HAVE_PTHREAD_ATFORK */ + +#endif /* HAVE_OPENSSL_RAND */ + @@ -2956,5 +3023,10 @@ if (r == NULL || PyModule_AddObject(m, "_OPENSSL_API_VERSION", r)) return NULL; +#ifdef PYSSL_RAND_ATFORK + if (PySSL_RAND_atfork() == -1) + return NULL; +#endif + return m; } diff --git a/configure b/configure --- a/configure +++ b/configure @@ -9794,6 +9794,17 @@ fi done + for ac_func in pthread_atfork +do : + ac_fn_c_check_func "$LINENO" "pthread_atfork" "ac_cv_func_pthread_atfork" +if test "x$ac_cv_func_pthread_atfork" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_PTHREAD_ATFORK 1 +_ACEOF + +fi +done + fi diff --git a/configure.ac b/configure.ac --- a/configure.ac +++ b/configure.ac @@ -2500,6 +2500,7 @@ [Define if pthread_sigmask() does not work on your system.]) ;; esac]) + AC_CHECK_FUNCS(pthread_atfork) fi diff --git a/pyconfig.h.in b/pyconfig.h.in --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -633,6 +633,9 @@ /* Define if your compiler supports function prototype */ #undef HAVE_PROTOTYPES +/* Define to 1 if you have the `pthread_atfork' function. */ +#undef HAVE_PTHREAD_ATFORK + /* Defined for Solaris 2.6 bug in pthread header. */ #undef HAVE_PTHREAD_DESTRUCTOR -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 13:43:31 2013 From: python-checkins at python.org (christian.heimes) Date: Wed, 21 Aug 2013 13:43:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318747=3A_Re-seed_OpenSSL=27s_pseudo-random_numb?= =?utf-8?q?er_generator_after_fork=2E?= Message-ID: <3cKn7l3flnz7LmG@mail.python.org> http://hg.python.org/cpython/rev/49e23a3adf26 changeset: 85291:49e23a3adf26 parent: 85289:4e79c3ae8a12 parent: 85290:8e1194c39bed user: Christian Heimes date: Wed Aug 21 13:26:34 2013 +0200 summary: Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. A pthread_atfork() child handler is used to seeded the PRNG with pid, time and some stack data. 
files: Lib/test/test_ssl.py | 32 ++++++++++++++ Misc/NEWS | 4 + Modules/_ssl.c | 72 ++++++++++++++++++++++++++++++++ configure | 11 ++++ configure.ac | 1 + pyconfig.h.in | 3 + 6 files changed, 123 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -143,6 +143,38 @@ self.assertRaises(TypeError, ssl.RAND_egd, 'foo', 1) ssl.RAND_add("this is a random string", 75.0) + @unittest.skipUnless(os.name == 'posix', 'requires posix') + def test_random_fork(self): + status = ssl.RAND_status() + if not status: + self.fail("OpenSSL's PRNG has insufficient randomness") + + rfd, wfd = os.pipe() + pid = os.fork() + if pid == 0: + try: + os.close(rfd) + child_random = ssl.RAND_pseudo_bytes(16)[0] + self.assertEqual(len(child_random), 16) + os.write(wfd, child_random) + os.close(wfd) + except BaseException: + os._exit(1) + else: + os._exit(0) + else: + os.close(wfd) + self.addCleanup(os.close, rfd) + _, status = os.waitpid(pid, 0) + self.assertEqual(status, 0) + + child_random = os.read(rfd, 16) + self.assertEqual(len(child_random), 16) + parent_random = ssl.RAND_pseudo_bytes(16)[0] + self.assertEqual(len(parent_random), 16) + + self.assertNotEqual(child_random, parent_random) + def test_parse_cert(self): # note that this uses an 'unofficial' function in _ssl.c, # provided solely for this test, to exercise the certificate diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -38,6 +38,10 @@ Library ------- +- Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. + A pthread_atfork() child handler is used to seeded the PRNG with pid, time + and some stack data. + - Issue #8865: Concurrent invocation of select.poll.poll() now raises a RuntimeError exception. Patch by Christian Schubert. diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -18,6 +18,11 @@ #ifdef WITH_THREAD #include "pythread.h" + +#ifdef HAVE_PTHREAD_ATFORK +# include +#endif + #define PySSL_BEGIN_ALLOW_THREADS_S(save) \ do { if (_ssl_locks_count>0) { (save) = PyEval_SaveThread(); } } while (0) #define PySSL_END_ALLOW_THREADS_S(save) \ @@ -2936,7 +2941,69 @@ Returns number of bytes read. Raises SSLError if connection to EGD\n\ fails or if it does not provide enough data to seed PRNG."); +/* Seed OpenSSL's PRNG at fork(), http://bugs.python.org/issue18747 + * + * The child handler seeds the PRNG from pseudo-random data like pid, the + * current time (nanoseconds, miliseconds or seconds) and an uninitialized + * array. The array contains stack variables that are impossible to predict + * on most systems, e.g. function return address (subject to ASLR), the + * stack protection canary and automatic variables. + * The code is inspired by Apache's ssl_rand_seed() function. + * + * Note: + * The code uses pthread_atfork() until Python has a proper atfork API. The + * handlers are not removed from the child process. + */ + +#if defined(HAVE_PTHREAD_ATFORK) && defined(WITH_THREAD) +#define PYSSL_RAND_ATFORK 1 + +static void +PySSL_RAND_atfork_child(void) +{ + struct { + char stack[128]; /* uninitialized (!) stack data, 128 is an + arbitrary number. 
*/ + pid_t pid; /* current pid */ + _PyTime_timeval tp; /* current time */ + } seed; + +#ifdef WITH_VALGRIND + VALGRIND_MAKE_MEM_DEFINED(seed.stack, sizeof(seed.stack)); #endif + seed.pid = getpid(); + _PyTime_gettimeofday(&(seed.tp)); + +#if 0 + fprintf(stderr, "PySSL_RAND_atfork_child() seeds %i bytes in pid %i\n", + (int)sizeof(seed), seed.pid); +#endif + RAND_add((unsigned char *)&seed, sizeof(seed), 0.0); +} + +static int +PySSL_RAND_atfork(void) +{ + static int registered = 0; + int retval; + + if (registered) + return 0; + + retval = pthread_atfork(NULL, /* prepare */ + NULL, /* parent */ + PySSL_RAND_atfork_child); /* child */ + if (retval != 0) { + PyErr_SetFromErrno(PyExc_OSError); + return -1; + } + registered = 1; + return 0; +} +#endif /* HAVE_PTHREAD_ATFORK */ + +#endif /* HAVE_OPENSSL_RAND */ + PyDoc_STRVAR(PySSL_get_default_verify_paths_doc, "get_default_verify_paths() -> tuple\n\ @@ -3549,5 +3616,10 @@ if (r == NULL || PyModule_AddObject(m, "_OPENSSL_API_VERSION", r)) return NULL; +#ifdef PYSSL_RAND_ATFORK + if (PySSL_RAND_atfork() == -1) + return NULL; +#endif + return m; } diff --git a/configure b/configure --- a/configure +++ b/configure @@ -9809,6 +9809,17 @@ fi done + for ac_func in pthread_atfork +do : + ac_fn_c_check_func "$LINENO" "pthread_atfork" "ac_cv_func_pthread_atfork" +if test "x$ac_cv_func_pthread_atfork" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_PTHREAD_ATFORK 1 +_ACEOF + +fi +done + fi diff --git a/configure.ac b/configure.ac --- a/configure.ac +++ b/configure.ac @@ -2512,6 +2512,7 @@ [Define if pthread_sigmask() does not work on your system.]) ;; esac]) + AC_CHECK_FUNCS(pthread_atfork) fi diff --git a/pyconfig.h.in b/pyconfig.h.in --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -633,6 +633,9 @@ /* Define if your compiler supports function prototype */ #undef HAVE_PROTOTYPES +/* Define to 1 if you have the `pthread_atfork' function. */ +#undef HAVE_PTHREAD_ATFORK + /* Defined for Solaris 2.6 bug in pthread header. */ #undef HAVE_PTHREAD_DESTRUCTOR -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 13:43:32 2013 From: python-checkins at python.org (christian.heimes) Date: Wed, 21 Aug 2013 13:43:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzQ3?= =?utf-8?q?=3A_Re-seed_OpenSSL=27s_pseudo-random_number_generator_after_fo?= =?utf-8?b?cmsu?= Message-ID: <3cKn7m72cwz7LlT@mail.python.org> http://hg.python.org/cpython/rev/2e6aa6c29be2 changeset: 85292:2e6aa6c29be2 branch: 2.7 parent: 85283:a4091c1de27a user: Christian Heimes date: Wed Aug 21 13:26:05 2013 +0200 summary: Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. A pthread_atfork() child handler is used to seeded the PRNG with pid, time and some stack data. files: Misc/NEWS | 4 ++ Modules/_ssl.c | 72 ++++++++++++++++++++++++++++++++++++++ configure | 11 +++++ configure.ac | 1 + pyconfig.h.in | 3 + 5 files changed, 91 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -32,6 +32,10 @@ Library ------- +- Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. + A pthread_atfork() child handler is used to seeded the PRNG with pid, time + and some stack data. + - Issue #8865: Concurrent invocation of select.poll.poll() now raises a RuntimeError exception. Patch by Christian Schubert. 
diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -18,6 +18,11 @@ #ifdef WITH_THREAD #include "pythread.h" + +#ifdef HAVE_PTHREAD_ATFORK +# include +#endif + #define PySSL_BEGIN_ALLOW_THREADS { \ PyThreadState *_save = NULL; \ if (_ssl_locks_count>0) {_save = PyEval_SaveThread();} @@ -1621,7 +1626,69 @@ Returns number of bytes read. Raises SSLError if connection to EGD\n\ fails or if it does not provide enough data to seed PRNG."); +/* Seed OpenSSL's PRNG at fork(), http://bugs.python.org/issue18747 + * + * The child handler seeds the PRNG from pseudo-random data like pid, the + * current time (nanoseconds, miliseconds or seconds) and an uninitialized + * array. The array contains stack variables that are impossible to predict + * on most systems, e.g. function return address (subject to ASLR), the + * stack protection canary and automatic variables. + * The code is inspired by Apache's ssl_rand_seed() function. + * + * Note: + * The code uses pthread_atfork() until Python has a proper atfork API. The + * handlers are not removed from the child process. + */ + +#if defined(HAVE_PTHREAD_ATFORK) && defined(WITH_THREAD) +#define PYSSL_RAND_ATFORK 1 + +static void +PySSL_RAND_atfork_child(void) +{ + struct { + char stack[128]; /* uninitialized (!) stack data, 128 is an + arbitrary number. */ + pid_t pid; /* current pid */ + time_t time; /* current time */ + } seed; + +#ifdef WITH_VALGRIND + VALGRIND_MAKE_MEM_DEFINED(seed.stack, sizeof(seed.stack)); #endif + seed.pid = getpid(); + seed.time = time(NULL); + +#if 0 + fprintf(stderr, "PySSL_RAND_atfork_child() seeds %i bytes in pid %i\n", + (int)sizeof(seed), seed.pid); +#endif + RAND_add((unsigned char *)&seed, sizeof(seed), 0.0); +} + +static int +PySSL_RAND_atfork(void) +{ + static int registered = 0; + int retval; + + if (registered) + return 0; + + retval = pthread_atfork(NULL, /* prepare */ + NULL, /* parent */ + PySSL_RAND_atfork_child); /* child */ + if (retval != 0) { + PyErr_SetFromErrno(PyExc_OSError); + return -1; + } + registered = 1; + return 0; +} +#endif /* HAVE_PTHREAD_ATFORK */ + +#endif /* HAVE_OPENSSL_RAND */ + /* List of functions exported by this module. */ @@ -1833,4 +1900,9 @@ r = PyString_FromString(SSLeay_version(SSLEAY_VERSION)); if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION", r)) return; + +#ifdef PYSSL_RAND_ATFORK + if (PySSL_RAND_atfork() == -1) + return; +#endif } diff --git a/configure b/configure --- a/configure +++ b/configure @@ -9630,6 +9630,17 @@ fi done + for ac_func in pthread_atfork +do : + ac_fn_c_check_func "$LINENO" "pthread_atfork" "ac_cv_func_pthread_atfork" +if test "x$ac_cv_func_pthread_atfork" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_PTHREAD_ATFORK 1 +_ACEOF + +fi +done + fi diff --git a/configure.ac b/configure.ac --- a/configure.ac +++ b/configure.ac @@ -2569,6 +2569,7 @@ [Define if pthread_sigmask() does not work on your system.]) ;; esac]) + AC_CHECK_FUNCS(pthread_atfork) fi diff --git a/pyconfig.h.in b/pyconfig.h.in --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -520,6 +520,9 @@ /* Define if you have GNU PTH threads. */ #undef HAVE_PTH +/* Define to 1 if you have the `pthread_atfork' function. */ +#undef HAVE_PTHREAD_ATFORK + /* Defined for Solaris 2.6 bug in pthread header. 
*/ #undef HAVE_PTHREAD_DESTRUCTOR -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 13:43:34 2013 From: python-checkins at python.org (christian.heimes) Date: Wed, 21 Aug 2013 13:43:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMi43IC0+IDIuNyk6?= =?utf-8?q?_merge?= Message-ID: <3cKn7p1jgcz7Lmf@mail.python.org> http://hg.python.org/cpython/rev/cb9c780a7c1a changeset: 85293:cb9c780a7c1a branch: 2.7 parent: 85292:2e6aa6c29be2 parent: 85286:da5b216916a3 user: Christian Heimes date: Wed Aug 21 13:43:20 2013 +0200 summary: merge files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 18:27:53 2013 From: python-checkins at python.org (charles-francois.natali) Date: Wed, 21 Aug 2013 18:27:53 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE1MjMz?= =?utf-8?q?=3A_Python_now_guarantees_that_callables_registered_with_the_at?= =?utf-8?q?exit?= Message-ID: <3cKvRs6nJhz7LkB@mail.python.org> http://hg.python.org/cpython/rev/84d74eb7a341 changeset: 85294:84d74eb7a341 branch: 2.7 user: Charles-Fran?ois Natali date: Wed Aug 21 18:25:00 2013 +0200 summary: Issue #15233: Python now guarantees that callables registered with the atexit module will be called in a deterministic order. files: Doc/library/atexit.rst | 13 +++++++------ Misc/NEWS | 3 +++ 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/Doc/library/atexit.rst b/Doc/library/atexit.rst --- a/Doc/library/atexit.rst +++ b/Doc/library/atexit.rst @@ -15,13 +15,14 @@ The :mod:`atexit` module defines a single function to register cleanup functions. Functions thus registered are automatically executed upon normal -interpreter termination. The order in which the functions are called is not -defined; if you have cleanup operations that depend on each other, you should -wrap them in a function and register that one. This keeps :mod:`atexit` simple. +interpreter termination. :mod:`atexit` runs these functions in the *reverse* +order in which they were registered; if you register ``A``, ``B``, and ``C``, +at interpreter termination time they will be run in the order ``C``, ``B``, +``A``. -Note: the functions registered via this module are not called when the program -is killed by a signal not handled by Python, when a Python fatal internal error -is detected, or when :func:`os._exit` is called. +**Note:** The functions registered via this module are not called when the +program is killed by a signal not handled by Python, when a Python fatal +internal error is detected, or when :func:`os._exit` is called. .. index:: single: exitfunc (in sys) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -32,6 +32,9 @@ Library ------- +- Issue #15233: Python now guarantees that callables registered with the atexit + module will be called in a deterministic order. + - Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. A pthread_atfork() child handler is used to seeded the PRNG with pid, time and some stack data. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 20:47:31 2013 From: python-checkins at python.org (richard.oudkerk) Date: Wed, 21 Aug 2013 20:47:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318762=3A_Print_de?= =?utf-8?q?bug_info_on_failure_to_create_new_forkserver_process=2E?= Message-ID: <3cKyXz4zb7z7LmB@mail.python.org> http://hg.python.org/cpython/rev/e1fdd79cfb01 changeset: 85295:e1fdd79cfb01 parent: 85291:49e23a3adf26 user: Richard Oudkerk date: Wed Aug 21 19:45:19 2013 +0100 summary: Issue #18762: Print debug info on failure to create new forkserver process. Also modify test code to hopefully avoid deadlock on failure. files: Lib/multiprocessing/forkserver.py | 15 +++++++++++++++ Lib/test/_test_multiprocessing.py | 6 ++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py --- a/Lib/multiprocessing/forkserver.py +++ b/Lib/multiprocessing/forkserver.py @@ -66,6 +66,21 @@ try: reduction.sendfds(client, allfds) return parent_r, parent_w + except OSError: + # XXX This is debugging info for Issue #18762 + import fcntl + L = [] + for fd in allfds: + try: + flags = fcntl.fcntl(fd, fcntl.F_GETFL) + except OSError as e: + L.append((fd, e)) + else: + L.append((fd, flags)) + print('*** connect_to_new_process: %r' % L, file=sys.stderr) + os.close(parent_r) + os.close(parent_w) + raise except: os.close(parent_r) os.close(parent_w) diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -3386,7 +3386,8 @@ if n > 1: p = multiprocessing.Process(target=cls.child, args=(n-1, conn)) p.start() - p.join() + conn.close() + p.join(timeout=5) else: conn.send(len(util._afterfork_registry)) conn.close() @@ -3397,8 +3398,9 @@ old_size = len(util._afterfork_registry) p = multiprocessing.Process(target=self.child, args=(5, w)) p.start() + w.close() new_size = r.recv() - p.join() + p.join(timeout=5) self.assertLessEqual(new_size, old_size) # -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 20:56:36 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Wed, 21 Aug 2013 20:56:36 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE3MTE5?= =?utf-8?q?=3A_Fixed_integer_overflows_when_processing_large_strings_and_t?= =?utf-8?q?uples?= Message-ID: <3cKylS0wSMz7Lk8@mail.python.org> http://hg.python.org/cpython/rev/6bc533d06cf1 changeset: 85296:6bc533d06cf1 branch: 3.3 parent: 85290:8e1194c39bed user: Serhiy Storchaka date: Wed Aug 21 21:38:21 2013 +0300 summary: Issue #17119: Fixed integer overflows when processing large strings and tuples in the tkinter module. files: Lib/test/test_tcl.py | 16 ++++++++++++- Misc/NEWS | 3 ++ Modules/_tkinter.c | 39 ++++++++++++++++++++++++------- 3 files changed, 48 insertions(+), 10 deletions(-) diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -3,6 +3,7 @@ import unittest import sys import os +import _testcapi from test import support # Skip this test if the _tkinter module wasn't built. 
@@ -236,8 +237,21 @@ self.assertEqual(split(arg), res, msg=arg) +class BigmemTclTest(unittest.TestCase): + + def setUp(self): + self.interp = Tcl() + + @unittest.skipUnless(_testcapi.INT_MAX < _testcapi.PY_SSIZE_T_MAX, + "needs UINT_MAX < SIZE_MAX") + @support.bigmemtest(size=_testcapi.INT_MAX + 1, memuse=5, dry_run=False) + def test_huge_string(self, size): + value = ' ' * size + self.assertRaises(OverflowError, self.interp.call, 'set', '_', value) + + def test_main(): - support.run_unittest(TclTest, TkinterTest) + support.run_unittest(TclTest, TkinterTest, BigmemTclTest) if __name__ == "__main__": test_main() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #17119: Fixed integer overflows when processing large strings and tuples + in the tkinter module. + - Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. A pthread_atfork() child handler is used to seeded the PRNG with pid, time and some stack data. diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -47,6 +47,9 @@ #define PyBool_FromLong PyLong_FromLong #endif +#define CHECK_SIZE(size, elemsize) \ + ((size_t)(size) <= Py_MAX((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize))) + /* Starting with Tcl 8.4, many APIs offer const-correctness. Unfortunately, making _tkinter correct for this API means to break earlier versions. USE_COMPAT_CONST allows to make _tkinter work with both 8.4 and @@ -364,7 +367,7 @@ char **argv = NULL; int fvStore[ARGSZ]; int *fv = NULL; - int argc = 0, fvc = 0, i; + Py_ssize_t argc = 0, fvc = 0, i; char *res = NULL; if (!(tmp = PyList_New(0))) @@ -386,8 +389,12 @@ argc = PyTuple_Size(args); if (argc > ARGSZ) { - argv = (char **)ckalloc(argc * sizeof(char *)); - fv = (int *)ckalloc(argc * sizeof(int)); + if (!CHECK_SIZE(argc, sizeof(char *))) { + PyErr_SetString(PyExc_OverflowError, "tuple is too long"); + goto finally; + } + argv = (char **)ckalloc((size_t)argc * sizeof(char *)); + fv = (int *)ckalloc((size_t)argc * sizeof(int)); if (argv == NULL || fv == NULL) { PyErr_NoMemory(); goto finally; @@ -966,12 +973,18 @@ else if (PyFloat_Check(value)) return Tcl_NewDoubleObj(PyFloat_AS_DOUBLE(value)); else if (PyTuple_Check(value)) { - Tcl_Obj **argv = (Tcl_Obj**) - ckalloc(PyTuple_Size(value)*sizeof(Tcl_Obj*)); - int i; + Tcl_Obj **argv; + Py_ssize_t size, i; + + size = PyTuple_Size(value); + if (!CHECK_SIZE(size, sizeof(Tcl_Obj *))) { + PyErr_SetString(PyExc_OverflowError, "tuple is too long"); + return NULL; + } + argv = (Tcl_Obj **) ckalloc(((size_t)size) * sizeof(Tcl_Obj *)); if(!argv) return 0; - for(i=0;i ARGSZ) { - objv = (Tcl_Obj **)ckalloc(objc * sizeof(char *)); + if (!CHECK_SIZE(objc, sizeof(Tcl_Obj *))) { + PyErr_SetString(PyExc_OverflowError, "tuple is too long"); + return NULL; + } + objv = (Tcl_Obj **)ckalloc(((size_t)objc) * sizeof(Tcl_Obj *)); if (objv == NULL) { PyErr_NoMemory(); objc = 0; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 20:56:37 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Wed, 21 Aug 2013 20:56:37 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2317119=3A_Fixed_integer_overflows_when_processin?= =?utf-8?q?g_large_strings_and_tuples?= Message-ID: <3cKylT3ltdz7Lm8@mail.python.org> http://hg.python.org/cpython/rev/b500daaee7d0 changeset: 85297:b500daaee7d0 parent: 85291:49e23a3adf26 parent: 85296:6bc533d06cf1 user: Serhiy 
Storchaka date: Wed Aug 21 21:43:08 2013 +0300 summary: Issue #17119: Fixed integer overflows when processing large strings and tuples in the tkinter module. files: Lib/test/test_tcl.py | 16 +++++++++++++++- Misc/NEWS | 3 +++ Modules/_tkinter.c | 29 +++++++++++++++++++++++------ 3 files changed, 41 insertions(+), 7 deletions(-) diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -3,6 +3,7 @@ import unittest import sys import os +import _testcapi from test import support # Skip this test if the _tkinter module wasn't built. @@ -236,8 +237,21 @@ self.assertEqual(split(arg), res, msg=arg) +class BigmemTclTest(unittest.TestCase): + + def setUp(self): + self.interp = Tcl() + + @unittest.skipUnless(_testcapi.INT_MAX < _testcapi.PY_SSIZE_T_MAX, + "needs UINT_MAX < SIZE_MAX") + @support.bigmemtest(size=_testcapi.INT_MAX + 1, memuse=5, dry_run=False) + def test_huge_string(self, size): + value = ' ' * size + self.assertRaises(OverflowError, self.interp.call, 'set', '_', value) + + def test_main(): - support.run_unittest(TclTest, TkinterTest) + support.run_unittest(TclTest, TkinterTest, BigmemTclTest) if __name__ == "__main__": test_main() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -38,6 +38,9 @@ Library ------- +- Issue #17119: Fixed integer overflows when processing large strings and tuples + in the tkinter module. + - Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. A pthread_atfork() child handler is used to seeded the PRNG with pid, time and some stack data. diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -47,6 +47,9 @@ #define PyBool_FromLong PyLong_FromLong #endif +#define CHECK_SIZE(size, elemsize) \ + ((size_t)(size) <= Py_MAX((size_t)INT_MAX, UINT_MAX / (size_t)(elemsize))) + /* Starting with Tcl 8.4, many APIs offer const-correctness. Unfortunately, making _tkinter correct for this API means to break earlier versions. 
USE_COMPAT_CONST allows to make _tkinter work with both 8.4 and @@ -850,12 +853,18 @@ else if (PyFloat_Check(value)) return Tcl_NewDoubleObj(PyFloat_AS_DOUBLE(value)); else if (PyTuple_Check(value)) { - Tcl_Obj **argv = (Tcl_Obj**) - ckalloc(PyTuple_Size(value)*sizeof(Tcl_Obj*)); - int i; + Tcl_Obj **argv; + Py_ssize_t size, i; + + size = PyTuple_Size(value); + if (!CHECK_SIZE(size, sizeof(Tcl_Obj *))) { + PyErr_SetString(PyExc_OverflowError, "tuple is too long"); + return NULL; + } + argv = (Tcl_Obj **) ckalloc(((size_t)size) * sizeof(Tcl_Obj *)); if(!argv) return 0; - for(i=0;i ARGSZ) { - objv = (Tcl_Obj **)ckalloc(objc * sizeof(char *)); + if (!CHECK_SIZE(objc, sizeof(Tcl_Obj *))) { + PyErr_SetString(PyExc_OverflowError, "tuple is too long"); + return NULL; + } + objv = (Tcl_Obj **)ckalloc(((size_t)objc) * sizeof(Tcl_Obj *)); if (objv == NULL) { PyErr_NoMemory(); objc = 0; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 20:56:38 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Wed, 21 Aug 2013 20:56:38 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE3MTE5?= =?utf-8?q?=3A_Fixed_integer_overflows_when_processing_large_Unicode_strin?= =?utf-8?q?gs?= Message-ID: <3cKylV6pJMz7Lmd@mail.python.org> http://hg.python.org/cpython/rev/ee0bb673536c changeset: 85298:ee0bb673536c branch: 2.7 parent: 85294:84d74eb7a341 user: Serhiy Storchaka date: Wed Aug 21 21:46:12 2013 +0300 summary: Issue #17119: Fixed integer overflows when processing large Unicode strings and tuples in the tkinter module. files: Lib/test/test_tcl.py | 17 +++++++++++- Misc/NEWS | 3 ++ Modules/_tkinter.c | 43 ++++++++++++++++++++++++------- 3 files changed, 52 insertions(+), 11 deletions(-) diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -3,6 +3,7 @@ import unittest import sys import os +import _testcapi from test import test_support from subprocess import Popen, PIPE @@ -245,8 +246,22 @@ self.assertEqual(split(arg), res) +class BigmemTclTest(unittest.TestCase): + + def setUp(self): + self.interp = Tcl() + + @unittest.skipUnless(_testcapi.INT_MAX < _testcapi.PY_SSIZE_T_MAX, + "needs UINT_MAX < SIZE_MAX") + @test_support.precisionbigmemtest(size=_testcapi.INT_MAX + 1, memuse=5, + dry_run=False) + def test_huge_string(self, size): + value = ' ' * size + self.assertRaises(OverflowError, self.interp.call, 'set', '_', value) + + def test_main(): - test_support.run_unittest(TclTest, TkinterTest) + test_support.run_unittest(TclTest, TkinterTest, BigmemTclTest) if __name__ == "__main__": test_main() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -32,6 +32,9 @@ Library ------- +- Issue #17119: Fixed integer overflows when processing large Unicode strings + and tuples in the tkinter module. + - Issue #15233: Python now guarantees that callables registered with the atexit module will be called in a deterministic order. diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -47,6 +47,10 @@ #define PyBool_FromLong PyInt_FromLong #endif +#define CHECK_SIZE(size, elemsize) \ + ((size_t)(size) <= (size_t)INT_MAX && \ + (size_t)(size) <= UINT_MAX / (size_t)(elemsize)) + /* Starting with Tcl 8.4, many APIs offer const-correctness. Unfortunately, making _tkinter correct for this API means to break earlier versions. 
USE_COMPAT_CONST allows to make _tkinter work with both 8.4 and @@ -378,7 +382,7 @@ char **argv = NULL; int fvStore[ARGSZ]; int *fv = NULL; - int argc = 0, fvc = 0, i; + Py_ssize_t argc = 0, fvc = 0, i; char *res = NULL; if (!(tmp = PyList_New(0))) @@ -400,8 +404,12 @@ argc = PyTuple_Size(args); if (argc > ARGSZ) { - argv = (char **)ckalloc(argc * sizeof(char *)); - fv = (int *)ckalloc(argc * sizeof(int)); + if (!CHECK_SIZE(argc, sizeof(char *))) { + PyErr_SetString(PyExc_OverflowError, "tuple is too long"); + goto finally; + } + argv = (char **)ckalloc((size_t)argc * sizeof(char *)); + fv = (int *)ckalloc((size_t)argc * sizeof(int)); if (argv == NULL || fv == NULL) { PyErr_NoMemory(); goto finally; @@ -983,12 +991,18 @@ else if (PyFloat_Check(value)) return Tcl_NewDoubleObj(PyFloat_AS_DOUBLE(value)); else if (PyTuple_Check(value)) { - Tcl_Obj **argv = (Tcl_Obj**) - ckalloc(PyTuple_Size(value)*sizeof(Tcl_Obj*)); - int i; + Tcl_Obj **argv; + Py_ssize_t size, i; + + size = PyTuple_Size(value); + if (!CHECK_SIZE(size, sizeof(Tcl_Obj *))) { + PyErr_SetString(PyExc_OverflowError, "tuple is too long"); + return NULL; + } + argv = (Tcl_Obj **) ckalloc(((size_t)size) * sizeof(Tcl_Obj *)); if(!argv) return 0; - for(i=0;i= size) outbuf = (Tcl_UniChar*)ckalloc(allocsize); /* Else overflow occurred, and we take the next exit */ @@ -1198,7 +1217,7 @@ Tkapp_CallArgs(PyObject *args, Tcl_Obj** objStore, int *pobjc) { Tcl_Obj **objv = objStore; - int objc = 0, i; + Py_ssize_t objc = 0, i; if (args == NULL) /* do nothing */; @@ -1213,7 +1232,11 @@ objc = PyTuple_Size(args); if (objc > ARGSZ) { - objv = (Tcl_Obj **)ckalloc(objc * sizeof(char *)); + if (!CHECK_SIZE(objc, sizeof(Tcl_Obj *))) { + PyErr_SetString(PyExc_OverflowError, "tuple is too long"); + return NULL; + } + objv = (Tcl_Obj **)ckalloc(((size_t)objc) * sizeof(Tcl_Obj *)); if (objv == NULL) { PyErr_NoMemory(); objc = 0; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Aug 21 20:56:40 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Wed, 21 Aug 2013 20:56:40 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_default_-=3E_default?= =?utf-8?q?=29=3A_Merge_heads?= Message-ID: <3cKylX33J1z7Ln6@mail.python.org> http://hg.python.org/cpython/rev/6ea8b8671f73 changeset: 85299:6ea8b8671f73 parent: 85297:b500daaee7d0 parent: 85295:e1fdd79cfb01 user: Serhiy Storchaka date: Wed Aug 21 21:52:50 2013 +0300 summary: Merge heads files: Lib/multiprocessing/forkserver.py | 15 +++++++++++++++ Lib/test/_test_multiprocessing.py | 6 ++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py --- a/Lib/multiprocessing/forkserver.py +++ b/Lib/multiprocessing/forkserver.py @@ -66,6 +66,21 @@ try: reduction.sendfds(client, allfds) return parent_r, parent_w + except OSError: + # XXX This is debugging info for Issue #18762 + import fcntl + L = [] + for fd in allfds: + try: + flags = fcntl.fcntl(fd, fcntl.F_GETFL) + except OSError as e: + L.append((fd, e)) + else: + L.append((fd, flags)) + print('*** connect_to_new_process: %r' % L, file=sys.stderr) + os.close(parent_r) + os.close(parent_w) + raise except: os.close(parent_r) os.close(parent_w) diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -3386,7 +3386,8 @@ if n > 1: p = multiprocessing.Process(target=cls.child, args=(n-1, conn)) p.start() - p.join() + 
conn.close() + p.join(timeout=5) else: conn.send(len(util._afterfork_registry)) conn.close() @@ -3397,8 +3398,9 @@ old_size = len(util._afterfork_registry) p = multiprocessing.Process(target=self.child, args=(5, w)) p.start() + w.close() new_size = r.recv() - p.join() + p.join(timeout=5) self.assertLessEqual(new_size, old_size) # -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 00:21:05 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 22 Aug 2013 00:21:05 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Close_=2318794=3A_Add_a_fi?= =?utf-8?q?leno=28=29_method_and_a_closed_attribute_to_select=2Edevpoll?= Message-ID: <3cL3HP604Sz7LjN@mail.python.org> http://hg.python.org/cpython/rev/ccbe2132392b changeset: 85300:ccbe2132392b user: Victor Stinner date: Thu Aug 22 00:19:50 2013 +0200 summary: Close #18794: Add a fileno() method and a closed attribute to select.devpoll objects. Add also tests on fileno() method and closed attribute of select.epoll and select.kqueue. files: Doc/library/select.rst | 31 +++++++++ Lib/test/test_devpoll.py | 25 +++++++ Lib/test/test_epoll.py | 25 +++++++ Lib/test/test_kqueue.py | 22 ++++++ Misc/NEWS | 3 + Modules/selectmodule.c | 92 +++++++++++++++++++++++++-- 6 files changed, 191 insertions(+), 7 deletions(-) diff --git a/Doc/library/select.rst b/Doc/library/select.rst --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -147,6 +147,27 @@ object. +.. method:: devpoll.close() + + Close the file descriptor of the polling object. + + .. versionadded:: 3.4 + + +.. attribute:: devpoll.closed + + ``True`` if the polling object is closed. + + .. versionadded:: 3.4 + + +.. method:: devpoll.fileno() + + Return the file descriptor number of the polling object. + + .. versionadded:: 3.4 + + .. method:: devpoll.register(fd[, eventmask]) Register a file descriptor with the polling object. Future calls to the @@ -244,6 +265,11 @@ Close the control file descriptor of the epoll object. +.. attribute:: epoll.closed + + ``True`` if the epoll object is closed. + + .. method:: epoll.fileno() Return the file descriptor number of the control fd. @@ -363,6 +389,11 @@ Close the control file descriptor of the kqueue object. +.. attribute:: kqueue.closed + + ``True`` if the kqueue object is closed. + + .. method:: kqueue.fileno() Return the file descriptor number of the control fd. 
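A rough sketch of how the attributes documented above behave at runtime. It uses select.epoll because select.devpoll only exists on Solaris, so this assumes a Linux build; the error text shown is the new wording that appears further down in this diff:

    import select

    ep = select.epoll()
    print(ep.fileno())    # integer control file descriptor
    print(ep.closed)      # False while the object is open

    ep.close()
    ep.close()            # a second close() is a harmless no-op
    print(ep.closed)      # True

    try:
        ep.fileno()       # any further operation now fails
    except ValueError as exc:
        print(exc)        # I/O operation on closed epoll object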
diff --git a/Lib/test/test_devpoll.py b/Lib/test/test_devpoll.py --- a/Lib/test/test_devpoll.py +++ b/Lib/test/test_devpoll.py @@ -87,6 +87,31 @@ self.assertRaises(OverflowError, pollster.poll, 1 << 63) self.assertRaises(OverflowError, pollster.poll, 1 << 64) + def test_close(self): + open_file = open(__file__, "rb") + self.addCleanup(open_file.close) + fd = open_file.fileno() + devpoll = select.devpoll() + + # test fileno() method and closed attribute + self.assertIsInstance(devpoll.fileno(), int) + self.assertFalse(devpoll.closed) + + # test close() + devpoll.close() + self.assertTrue(devpoll.closed) + self.assertRaises(ValueError, devpoll.fileno) + + # close() can be called more than once + devpoll.close() + + # operations must fail with ValueError("I/O operation on closed ...") + self.assertRaises(ValueError, devpoll.modify, fd, select.POLLIN) + self.assertRaises(ValueError, devpoll.poll) + self.assertRaises(ValueError, devpoll.register, fd, fd, select.POLLIN) + self.assertRaises(ValueError, devpoll.unregister, fd) + + def test_main(): run_unittest(DevPollTests) diff --git a/Lib/test/test_epoll.py b/Lib/test/test_epoll.py --- a/Lib/test/test_epoll.py +++ b/Lib/test/test_epoll.py @@ -225,6 +225,31 @@ server.close() ep.unregister(fd) + def test_close(self): + open_file = open(__file__, "rb") + self.addCleanup(open_file.close) + fd = open_file.fileno() + epoll = select.epoll() + + # test fileno() method and closed attribute + self.assertIsInstance(epoll.fileno(), int) + self.assertFalse(epoll.closed) + + # test close() + epoll.close() + self.assertTrue(epoll.closed) + self.assertRaises(ValueError, epoll.fileno) + + # close() can be called more than once + epoll.close() + + # operations must fail with ValueError("I/O operation on closed ...") + self.assertRaises(ValueError, epoll.modify, fd, select.EPOLLIN) + self.assertRaises(ValueError, epoll.poll, 1.0) + self.assertRaises(ValueError, epoll.register, fd, select.EPOLLIN) + self.assertRaises(ValueError, epoll.unregister, fd) + + def test_main(): support.run_unittest(TestEPoll) diff --git a/Lib/test/test_kqueue.py b/Lib/test/test_kqueue.py --- a/Lib/test/test_kqueue.py +++ b/Lib/test/test_kqueue.py @@ -185,6 +185,28 @@ b.close() kq.close() + def test_close(self): + open_file = open(__file__, "rb") + self.addCleanup(open_file.close) + fd = open_file.fileno() + kqueue = select.kqueue() + + # test fileno() method and closed attribute + self.assertIsInstance(kqueue.fileno(), int) + self.assertFalse(kqueue.closed) + + # test close() + kqueue.close() + self.assertTrue(kqueue.closed) + self.assertRaises(ValueError, kqueue.fileno) + + # close() can be called more than once + kqueue.close() + + # operations must fail with ValueError("I/O operation on closed ...") + self.assertRaises(ValueError, kqueue.control, None, 4) + + def test_main(): support.run_unittest(TestKQueue) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -38,6 +38,9 @@ Library ------- +- Issue #18794: Add a fileno() method and a closed attribute to select.devpoll + objects. + - Issue #17119: Fixed integer overflows when processing large strings and tuples in the tkinter module. 
diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -684,6 +684,13 @@ static PyTypeObject devpoll_Type; +static PyObject * +devpoll_err_closed(void) +{ + PyErr_SetString(PyExc_ValueError, "I/O operation on closed devpoll object"); + return NULL; +} + static int devpoll_flush(devpollObject *self) { int size, n; @@ -724,6 +731,9 @@ PyObject *o; int fd, events = POLLIN | POLLPRI | POLLOUT; + if (self->fd_devpoll < 0) + return devpoll_err_closed(); + if (!PyArg_ParseTuple(args, "O|i:register", &o, &events)) { return NULL; } @@ -788,6 +798,9 @@ { int fd; + if (self->fd_devpoll < 0) + return devpoll_err_closed(); + fd = PyObject_AsFileDescriptor( o ); if (fd == -1) return NULL; @@ -817,6 +830,9 @@ long timeout; PyObject *value, *num1, *num2; + if (self->fd_devpoll < 0) + return devpoll_err_closed(); + if (!PyArg_UnpackTuple(args, "poll", 0, 1, &tout)) { return NULL; } @@ -895,6 +911,45 @@ return NULL; } +static PyObject* +devpoll_close(devpollObject *self) +{ + errno = devpoll_internal_close(self); + if (errno < 0) { + PyErr_SetFromErrno(PyExc_OSError); + return NULL; + } + Py_RETURN_NONE; +} + +PyDoc_STRVAR(devpoll_close_doc, +"close() -> None\n\ +\n\ +Close the devpoll file descriptor. Further operations on the devpoll\n\ +object will raise an exception."); + +static PyObject* +devpoll_get_closed(devpollObject *self) +{ + if (self->fd_devpoll < 0) + Py_RETURN_TRUE; + else + Py_RETURN_FALSE; +} + +static PyObject* +devpoll_fileno(devpollObject *self) +{ + if (self->fd_devpoll < 0) + return devpoll_err_closed(); + return PyLong_FromLong(self->fd_devpoll); +} + +PyDoc_STRVAR(devpoll_fileno_doc, +"fileno() -> int\n\ +\n\ +Return the file descriptor."); + static PyMethodDef devpoll_methods[] = { {"register", (PyCFunction)devpoll_register, METH_VARARGS, devpoll_register_doc}, @@ -904,9 +959,19 @@ METH_O, devpoll_unregister_doc}, {"poll", (PyCFunction)devpoll_poll, METH_VARARGS, devpoll_poll_doc}, + {"close", (PyCFunction)devpoll_close, METH_NOARGS, + devpoll_close_doc}, + {"fileno", (PyCFunction)devpoll_fileno, METH_NOARGS, + devpoll_fileno_doc}, {NULL, NULL} /* sentinel */ }; +static PyGetSetDef devpoll_getsetlist[] = { + {"closed", (getter)devpoll_get_closed, NULL, + "True if the devpoll object is closed"}, + {0}, +}; + static devpollObject * newDevPollObject(void) { @@ -957,15 +1022,26 @@ return self; } +static int +devpoll_internal_close(pyEpoll_Object *self) +{ + int save_errno = 0; + if (self->fd_devpoll >= 0) { + int fd = self->fd_devpoll; + self->fd_devpoll = -1; + Py_BEGIN_ALLOW_THREADS + if (close(fd) < 0) + save_errno = errno; + Py_END_ALLOW_THREADS + } + return save_errno; +} + static void devpoll_dealloc(devpollObject *self) { - Py_BEGIN_ALLOW_THREADS - close(self->fd_devpoll); - Py_END_ALLOW_THREADS - + (void)devpoll_internal_close(); PyMem_DEL(self->fds); - PyObject_Del(self); } @@ -1001,6 +1077,8 @@ 0, /*tp_iter*/ 0, /*tp_iternext*/ devpoll_methods, /*tp_methods*/ + 0, /* tp_members */ + devpoll_getsetlist, /* tp_getset */ }; #endif /* HAVE_SYS_DEVPOLL_H */ @@ -1084,7 +1162,7 @@ static PyObject * pyepoll_err_closed(void) { - PyErr_SetString(PyExc_ValueError, "I/O operation on closed epoll fd"); + PyErr_SetString(PyExc_ValueError, "I/O operation on closed epoll object"); return NULL; } @@ -1776,7 +1854,7 @@ static PyObject * kqueue_queue_err_closed(void) { - PyErr_SetString(PyExc_ValueError, "I/O operation on closed kqueue fd"); + PyErr_SetString(PyExc_ValueError, "I/O operation on closed kqueue object"); return 
NULL; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 00:39:58 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 22 Aug 2013 00:39:58 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318792=3A_Use_=221?= =?utf-8?b?MjcuMC4wLjEiIG9yICI6OjEiIGluc3RlYWQgb2YgImxvY2FsaG9zdCIgYXMg?= =?utf-8?q?much_as?= Message-ID: <3cL3jB3rwRz7LlS@mail.python.org> http://hg.python.org/cpython/rev/085ba7d85eb2 changeset: 85301:085ba7d85eb2 user: Antoine Pitrou date: Thu Aug 22 00:39:46 2013 +0200 summary: Issue #18792: Use "127.0.0.1" or "::1" instead of "localhost" as much as possible, since "localhost" goes through a DNS lookup under recent Windows versions. files: Lib/test/_test_multiprocessing.py | 10 +++++----- Lib/test/support/__init__.py | 6 +++++- Lib/test/test_asyncore.py | 5 ++--- Lib/test/test_ftplib.py | 8 ++++---- Lib/test/test_timeout.py | 2 +- Misc/NEWS | 7 +++++++ 6 files changed, 24 insertions(+), 14 deletions(-) diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -1999,7 +1999,7 @@ authkey = os.urandom(32) manager = QueueManager( - address=('localhost', 0), authkey=authkey, serializer=SERIALIZER + address=(test.support.HOST, 0), authkey=authkey, serializer=SERIALIZER ) manager.start() @@ -2037,7 +2037,7 @@ def test_rapid_restart(self): authkey = os.urandom(32) manager = QueueManager( - address=('localhost', 0), authkey=authkey, serializer=SERIALIZER) + address=(test.support.HOST, 0), authkey=authkey, serializer=SERIALIZER) srvr = manager.get_server() addr = srvr.address # Close the connection.Listener socket which gets opened as a part @@ -2509,7 +2509,7 @@ l.close() l = socket.socket() - l.bind(('localhost', 0)) + l.bind((test.support.HOST, 0)) l.listen(1) conn.send(l.getsockname()) new_conn, addr = l.accept() @@ -3151,9 +3151,9 @@ def test_wait_socket(self, slow=False): from multiprocessing.connection import wait l = socket.socket() - l.bind(('', 0)) + l.bind((test.support.HOST, 0)) l.listen(4) - addr = ('localhost', l.getsockname()[1]) + addr = l.getsockname() readers = [] procs = [] dic = {} diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -480,7 +480,11 @@ return decorator -HOST = 'localhost' +# Don't use "localhost", since resolving it uses the DNS under recent +# Windows versions (see issue #18792). +HOST = "127.0.0.1" +HOSTv6 = "::1" + def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): """Returns an unused port that should be suitable for binding. 
This is diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py --- a/Lib/test/test_asyncore.py +++ b/Lib/test/test_asyncore.py @@ -10,7 +10,7 @@ import struct from test import support -from test.support import TESTFN, run_unittest, unlink +from test.support import TESTFN, run_unittest, unlink, HOST, HOSTv6 from io import BytesIO from io import StringIO @@ -19,7 +19,6 @@ except ImportError: threading = None -HOST = support.HOST TIMEOUT = 3 HAS_UNIX_SOCKETS = hasattr(socket, 'AF_UNIX') @@ -816,7 +815,7 @@ @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 support required') class TestAPI_UseIPv6Sockets(BaseTestAPI): family = socket.AF_INET6 - addr = ('::1', 0) + addr = (HOSTv6, 0) @unittest.skipUnless(HAS_UNIX_SOCKETS, 'Unix sockets required') class TestAPI_UseUnixSockets(BaseTestAPI): diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -18,7 +18,7 @@ from unittest import TestCase from test import support -from test.support import HOST +from test.support import HOST, HOSTv6 threading = support.import_module('threading') TIMEOUT = 3 @@ -767,7 +767,7 @@ class TestIPv6Environment(TestCase): def setUp(self): - self.server = DummyFTPServer(('::1', 0), af=socket.AF_INET6) + self.server = DummyFTPServer((HOSTv6, 0), af=socket.AF_INET6) self.server.start() self.client = ftplib.FTP(timeout=TIMEOUT) self.client.connect(self.server.host, self.server.port) @@ -950,7 +950,7 @@ self.assertTrue(socket.getdefaulttimeout() is None) socket.setdefaulttimeout(30) try: - ftp = ftplib.FTP("localhost") + ftp = ftplib.FTP(HOST) finally: socket.setdefaulttimeout(None) self.assertEqual(ftp.sock.gettimeout(), 30) @@ -962,7 +962,7 @@ self.assertTrue(socket.getdefaulttimeout() is None) socket.setdefaulttimeout(30) try: - ftp = ftplib.FTP("localhost", timeout=None) + ftp = ftplib.FTP(HOST, timeout=None) finally: socket.setdefaulttimeout(None) self.assertTrue(ftp.sock.gettimeout() is None) diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py --- a/Lib/test/test_timeout.py +++ b/Lib/test/test_timeout.py @@ -110,7 +110,7 @@ # solution. fuzz = 2.0 - localhost = '127.0.0.1' + localhost = support.HOST def setUp(self): raise NotImplementedError() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -110,6 +110,13 @@ - Issue #8860: Fixed rounding in timedelta constructor. +Tests +----- + +- Issue #18792: Use "127.0.0.1" or "::1" instead of "localhost" as much as + possible, since "localhost" goes through a DNS lookup under recent Windows + versions. + What's New in Python 3.4.0 Alpha 1? =================================== -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 00:49:01 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 22 Aug 2013 00:49:01 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4Nzky?= =?utf-8?b?OiBVc2UgIjEyNy4wLjAuMSIgb3IgIjo6MSIgaW5zdGVhZCBvZiAibG9jYWxo?= =?utf-8?q?ost=22_as_much_as?= Message-ID: <3cL3vd1qKyz7Lk2@mail.python.org> http://hg.python.org/cpython/rev/7728b073c77c changeset: 85302:7728b073c77c branch: 3.3 parent: 85296:6bc533d06cf1 user: Antoine Pitrou date: Thu Aug 22 00:39:46 2013 +0200 summary: Issue #18792: Use "127.0.0.1" or "::1" instead of "localhost" as much as possible, since "localhost" goes through a DNS lookup under recent Windows versions. 
files: Lib/test/support/__init__.py | 6 +++++- Lib/test/test_asyncore.py | 6 ++---- Lib/test/test_ftplib.py | 8 ++++---- Lib/test/test_multiprocessing.py | 10 +++++----- Lib/test/test_timeout.py | 2 +- Misc/NEWS | 4 ++++ 6 files changed, 21 insertions(+), 15 deletions(-) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -479,7 +479,11 @@ return decorator -HOST = 'localhost' +# Don't use "localhost", since resolving it uses the DNS under recent +# Windows versions (see issue #18792). +HOST = "127.0.0.1" +HOSTv6 = "::1" + def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): """Returns an unused port that should be suitable for binding. This is diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py --- a/Lib/test/test_asyncore.py +++ b/Lib/test/test_asyncore.py @@ -10,7 +10,7 @@ import struct from test import support -from test.support import TESTFN, run_unittest, unlink +from test.support import TESTFN, run_unittest, unlink, HOST, HOSTv6 from io import BytesIO from io import StringIO @@ -19,8 +19,6 @@ except ImportError: threading = None -HOST = support.HOST - HAS_UNIX_SOCKETS = hasattr(socket, 'AF_UNIX') class dummysocket: @@ -809,7 +807,7 @@ @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 support required') class TestAPI_UseIPv6Sockets(BaseTestAPI): family = socket.AF_INET6 - addr = ('::1', 0) + addr = (HOSTv6, 0) @unittest.skipUnless(HAS_UNIX_SOCKETS, 'Unix sockets required') class TestAPI_UseUnixSockets(BaseTestAPI): diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -18,7 +18,7 @@ from unittest import TestCase from test import support -from test.support import HOST +from test.support import HOST, HOSTv6 threading = support.import_module('threading') # the dummy data returned by server over the data channel when @@ -766,7 +766,7 @@ class TestIPv6Environment(TestCase): def setUp(self): - self.server = DummyFTPServer(('::1', 0), af=socket.AF_INET6) + self.server = DummyFTPServer((HOSTv6, 0), af=socket.AF_INET6) self.server.start() self.client = ftplib.FTP() self.client.connect(self.server.host, self.server.port) @@ -949,7 +949,7 @@ self.assertTrue(socket.getdefaulttimeout() is None) socket.setdefaulttimeout(30) try: - ftp = ftplib.FTP("localhost") + ftp = ftplib.FTP(HOST) finally: socket.setdefaulttimeout(None) self.assertEqual(ftp.sock.gettimeout(), 30) @@ -961,7 +961,7 @@ self.assertTrue(socket.getdefaulttimeout() is None) socket.setdefaulttimeout(30) try: - ftp = ftplib.FTP("localhost", timeout=None) + ftp = ftplib.FTP(HOST, timeout=None) finally: socket.setdefaulttimeout(None) self.assertTrue(ftp.sock.gettimeout() is None) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1968,7 +1968,7 @@ authkey = os.urandom(32) manager = QueueManager( - address=('localhost', 0), authkey=authkey, serializer=SERIALIZER + address=(test.support.HOST, 0), authkey=authkey, serializer=SERIALIZER ) manager.start() @@ -2006,7 +2006,7 @@ def test_rapid_restart(self): authkey = os.urandom(32) manager = QueueManager( - address=('localhost', 0), authkey=authkey, serializer=SERIALIZER) + address=(test.support.HOST, 0), authkey=authkey, serializer=SERIALIZER) srvr = manager.get_server() addr = srvr.address # Close the connection.Listener socket which gets opened as a part @@ -2478,7 +2478,7 @@ 
l.close() l = socket.socket() - l.bind(('localhost', 0)) + l.bind((test.support.HOST, 0)) l.listen(1) conn.send(l.getsockname()) new_conn, addr = l.accept() @@ -3235,9 +3235,9 @@ def test_wait_socket(self, slow=False): from multiprocessing.connection import wait l = socket.socket() - l.bind(('', 0)) + l.bind((test.support.HOST, 0)) l.listen(4) - addr = ('localhost', l.getsockname()[1]) + addr = l.getsockname() readers = [] procs = [] dic = {} diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py --- a/Lib/test/test_timeout.py +++ b/Lib/test/test_timeout.py @@ -110,7 +110,7 @@ # solution. fuzz = 2.0 - localhost = '127.0.0.1' + localhost = support.HOST def setUp(self): raise NotImplementedError() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -289,6 +289,10 @@ Tests ----- +- Issue #18792: Use "127.0.0.1" or "::1" instead of "localhost" as much as + possible, since "localhost" goes through a DNS lookup under recent Windows + versions. + - Issue #1666318: Add a test that shutil.copytree() retains directory permissions. Patch by Catherine Devlin. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 00:49:02 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 22 Aug 2013 00:49:02 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Null_merge?= Message-ID: <3cL3vf3ZY6z7LkS@mail.python.org> http://hg.python.org/cpython/rev/89df1984adda changeset: 85303:89df1984adda parent: 85301:085ba7d85eb2 parent: 85302:7728b073c77c user: Antoine Pitrou date: Thu Aug 22 00:48:49 2013 +0200 summary: Null merge files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 00:58:37 2013 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 22 Aug 2013 00:58:37 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4Nzky?= =?utf-8?b?OiBVc2UgIjEyNy4wLjAuMSIgb3IgIjo6MSIgaW5zdGVhZCBvZiAibG9jYWxo?= =?utf-8?q?ost=22_as_much_as?= Message-ID: <3cL46j6Rs2z7LjP@mail.python.org> http://hg.python.org/cpython/rev/48de8df194d9 changeset: 85304:48de8df194d9 branch: 2.7 parent: 85298:ee0bb673536c user: Antoine Pitrou date: Thu Aug 22 00:39:46 2013 +0200 summary: Issue #18792: Use "127.0.0.1" or "::1" instead of "localhost" as much as possible, since "localhost" goes through a DNS lookup under recent Windows versions. 
files: Lib/test/test_asyncore.py | 3 +-- Lib/test/test_ftplib.py | 8 ++++---- Lib/test/test_multiprocessing.py | 4 ++-- Lib/test/test_support.py | 7 ++++++- Misc/NEWS | 4 ++++ 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/Lib/test/test_asyncore.py b/Lib/test/test_asyncore.py --- a/Lib/test/test_asyncore.py +++ b/Lib/test/test_asyncore.py @@ -10,7 +10,7 @@ import struct from test import test_support -from test.test_support import TESTFN, run_unittest, unlink +from test.test_support import TESTFN, run_unittest, unlink, HOST from StringIO import StringIO try: @@ -18,7 +18,6 @@ except ImportError: threading = None -HOST = test_support.HOST class dummysocket: def __init__(self): diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -17,7 +17,7 @@ from unittest import TestCase from test import test_support -from test.test_support import HOST +from test.test_support import HOST, HOSTv6 threading = test_support.import_module('threading') @@ -562,7 +562,7 @@ class TestIPv6Environment(TestCase): def setUp(self): - self.server = DummyFTPServer((HOST, 0), af=socket.AF_INET6) + self.server = DummyFTPServer((HOSTv6, 0), af=socket.AF_INET6) self.server.start() self.client = ftplib.FTP() self.client.connect(self.server.host, self.server.port) @@ -713,7 +713,7 @@ self.assertTrue(socket.getdefaulttimeout() is None) socket.setdefaulttimeout(30) try: - ftp = ftplib.FTP("localhost") + ftp = ftplib.FTP(HOST) finally: socket.setdefaulttimeout(None) self.assertEqual(ftp.sock.gettimeout(), 30) @@ -725,7 +725,7 @@ self.assertTrue(socket.getdefaulttimeout() is None) socket.setdefaulttimeout(30) try: - ftp = ftplib.FTP("localhost", timeout=None) + ftp = ftplib.FTP(HOST, timeout=None) finally: socket.setdefaulttimeout(None) self.assertTrue(ftp.sock.gettimeout() is None) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1385,7 +1385,7 @@ authkey = os.urandom(32) manager = QueueManager( - address=('localhost', 0), authkey=authkey, serializer=SERIALIZER + address=(test.test_support.HOST, 0), authkey=authkey, serializer=SERIALIZER ) manager.start() @@ -1423,7 +1423,7 @@ def test_rapid_restart(self): authkey = os.urandom(32) manager = QueueManager( - address=('localhost', 0), authkey=authkey, serializer=SERIALIZER) + address=(test.test_support.HOST, 0), authkey=authkey, serializer=SERIALIZER) srvr = manager.get_server() addr = srvr.address # Close the connection.Listener socket which gets opened as a part diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -290,7 +290,12 @@ msg = "Use of the `%s' resource not enabled" % resource raise ResourceDenied(msg) -HOST = 'localhost' + +# Don't use "localhost", since resolving it uses the DNS under recent +# Windows versions (see issue #18792). +HOST = "127.0.0.1" +HOSTv6 = "::1" + def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): """Returns an unused port that should be suitable for binding. This is diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -191,6 +191,10 @@ Tests ----- +- Issue #18792: Use "127.0.0.1" or "::1" instead of "localhost" as much as + possible, since "localhost" goes through a DNS lookup under recent Windows + versions. + - Issue #18357: add tests for dictview set difference. Patch by Fraser Tweedale. 
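The practical effect of the new constants is easiest to see in a tiny sketch (not from the patch; the socket usage is only illustrative). Binding to the literal address in test.support.HOST never touches the resolver, whereas the old 'localhost' string could trigger a DNS lookup on recent Windows:

    import socket
    from test import support      # on 2.7 this is: from test import test_support

    s = socket.socket()
    s.bind((support.HOST, 0))     # "127.0.0.1" -- no name resolution needed
    port = s.getsockname()[1]     # kernel-chosen free port
    s.close()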
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 01:57:18 2013 From: python-checkins at python.org (tim.peters) Date: Thu, 22 Aug 2013 01:57:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAobWVyZ2UgMy4yIC0+IDMuMyk6?= =?utf-8?q?_Merge_3=2E2_into_3=2E3=2E?= Message-ID: <3cL5QQ5q9Mz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/a76564388a2b changeset: 85305:a76564388a2b branch: 3.3 parent: 85302:7728b073c77c parent: 83826:b9b521efeba3 user: Tim Peters date: Wed Aug 21 18:52:57 2013 -0500 summary: Merge 3.2 into 3.3. The only file change is adding the v3.2.5 tag to 3.3's .hgtags file. files: .hgtags | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -101,6 +101,7 @@ 3d0686d90f55a78f96d9403da2c52dc2411419d0 v3.2.3 b2cb7bc1edb8493c0a78f9331eae3e8fba6a881d v3.2.4rc1 1e10bdeabe3de02f038a63c001911561ac1d13a7 v3.2.4 +cef745775b6583446572cffad704100983db2bea v3.2.5 f1a9a6505731714f0e157453ff850e3b71615c45 v3.3.0a1 2f69db52d6de306cdaef0a0cc00cc823fb350b01 v3.3.0a2 0b53b70a40a00013505eb35e3660057b62be77be v3.3.0a3 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 01:57:20 2013 From: python-checkins at python.org (tim.peters) Date: Thu, 22 Aug 2013 01:57:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_3=2E3_into_default=2E?= Message-ID: <3cL5QS0TwWz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/d2b03e3537bf changeset: 85306:d2b03e3537bf parent: 85303:89df1984adda parent: 85305:a76564388a2b user: Tim Peters date: Wed Aug 21 18:55:56 2013 -0500 summary: Merge 3.3 into default. The only file change is adding the v3.2.5 tag to the .hgtags file. 
files: .hgtags | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -101,6 +101,7 @@ 3d0686d90f55a78f96d9403da2c52dc2411419d0 v3.2.3 b2cb7bc1edb8493c0a78f9331eae3e8fba6a881d v3.2.4rc1 1e10bdeabe3de02f038a63c001911561ac1d13a7 v3.2.4 +cef745775b6583446572cffad704100983db2bea v3.2.5 f1a9a6505731714f0e157453ff850e3b71615c45 v3.3.0a1 2f69db52d6de306cdaef0a0cc00cc823fb350b01 v3.3.0a2 0b53b70a40a00013505eb35e3660057b62be77be v3.3.0a3 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 01:59:07 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 22 Aug 2013 01:59:07 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Use_new_new_stat=2ES=5FISD?= =?utf-8?q?OOR=28=29_function=2C_instead_of_hardcoded_mask?= Message-ID: <3cL5SW5S5cz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/fed15a462aa5 changeset: 85307:fed15a462aa5 user: Victor Stinner date: Thu Aug 22 01:58:04 2013 +0200 summary: Use new new stat.S_ISDOOR() function, instead of hardcoded mask files: Lib/test/subprocessdata/fd_status.py | 3 ++- 1 files changed, 2 insertions(+), 1 deletions(-) diff --git a/Lib/test/subprocessdata/fd_status.py b/Lib/test/subprocessdata/fd_status.py --- a/Lib/test/subprocessdata/fd_status.py +++ b/Lib/test/subprocessdata/fd_status.py @@ -3,6 +3,7 @@ import errno import os +import stat try: _MAXFD = os.sysconf("SC_OPEN_MAX") @@ -19,6 +20,6 @@ continue raise # Ignore Solaris door files - if st.st_mode & 0xF000 != 0xd000: + if not stat.S_ISDOOR(st.st_mode): fds.append(fd) print(','.join(map(str, fds))) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 01:59:08 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 22 Aug 2013 01:59:08 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Cleanup_test=5Fbuiltin?= Message-ID: <3cL5SY00YTz7LlF@mail.python.org> http://hg.python.org/cpython/rev/0a1e1b929665 changeset: 85308:0a1e1b929665 user: Victor Stinner date: Thu Aug 22 01:58:12 2013 +0200 summary: Cleanup test_builtin files: Lib/test/test_builtin.py | 16 ++++------------ 1 files changed, 4 insertions(+), 12 deletions(-) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -976,29 +976,25 @@ def write_testfile(self): # NB the first 4 lines are also used to test input, below fp = open(TESTFN, 'w') - try: + self.addCleanup(unlink, TESTFN) + with fp: fp.write('1+1\n') fp.write('The quick brown fox jumps over the lazy dog') fp.write('.\n') fp.write('Dear John\n') fp.write('XXX'*100) fp.write('YYY'*100) - finally: - fp.close() def test_open(self): self.write_testfile() fp = open(TESTFN, 'r') - try: + with fp: self.assertEqual(fp.readline(4), '1+1\n') self.assertEqual(fp.readline(), 'The quick brown fox jumps over the lazy dog.\n') self.assertEqual(fp.readline(4), 'Dear') self.assertEqual(fp.readline(100), ' John\n') self.assertEqual(fp.read(300), 'XXX'*100) self.assertEqual(fp.read(1000), 'YYY'*100) - finally: - fp.close() - unlink(TESTFN) def test_open_default_encoding(self): old_environ = dict(os.environ) @@ -1013,11 +1009,8 @@ self.write_testfile() current_locale_encoding = locale.getpreferredencoding(False) fp = open(TESTFN, 'w') - try: + with fp: self.assertEqual(fp.encoding, current_locale_encoding) - finally: - fp.close() - unlink(TESTFN) finally: os.environ.clear() os.environ.update(old_environ) @@ -1132,7 +1125,6 @@ sys.stdin = savestdin 
sys.stdout = savestdout fp.close() - unlink(TESTFN) @unittest.skipUnless(pty, "the pty and signal modules must be available") def check_input_tty(self, prompt, terminal_input, stdio_encoding=None): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 02:23:44 2013 From: python-checkins at python.org (victor.stinner) Date: Thu, 22 Aug 2013 02:23:44 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_remove_unused_declaration?= Message-ID: <3cL60w05BDz7LjQ@mail.python.org> http://hg.python.org/cpython/rev/ea4130668150 changeset: 85309:ea4130668150 user: Victor Stinner date: Thu Aug 22 02:23:13 2013 +0200 summary: remove unused declaration files: Python/import.c | 3 --- 1 files changed, 0 insertions(+), 3 deletions(-) diff --git a/Python/import.c b/Python/import.c --- a/Python/import.c +++ b/Python/import.c @@ -24,9 +24,6 @@ /* See _PyImport_FixupExtensionObject() below */ static PyObject *extensions = NULL; -/* Function from Parser/tokenizer.c */ -extern char * PyTokenizer_FindEncodingFilename(int, PyObject *); - /* This table is defined in config.c: */ extern struct _inittab _PyImport_Inittab[]; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 03:14:30 2013 From: python-checkins at python.org (r.david.murray) Date: Thu, 22 Aug 2013 03:14:30 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogIzE4MzI0OiBzZXRf?= =?utf-8?q?payload_now_correctly_handles_binary_input=2E?= Message-ID: <3cL77V2R1Kz7Lmg@mail.python.org> http://hg.python.org/cpython/rev/64e004737837 changeset: 85310:64e004737837 branch: 3.3 parent: 85305:a76564388a2b user: R David Murray date: Wed Aug 21 21:10:31 2013 -0400 summary: #18324: set_payload now correctly handles binary input. This also backs out the previous fixes for for #14360, #1717, and #16564. Those bugs were actually caused by the fact that set_payload didn't decode to str, thus rendering the model inconsistent. This fix does mean the data processed by the encoder functions goes through an extra encode/decode cycle, but it means the model is always consistent. Future API updates will provide a better way to encode payloads, which will bypass this minor de-optimization. Tests by Vajrasky Kok. files: Lib/email/encoders.py | 20 ++-------- Lib/email/message.py | 2 + Lib/test/test_email/test_email.py | 36 +++++++++++++++++++ Misc/NEWS | 3 + 4 files changed, 45 insertions(+), 16 deletions(-) diff --git a/Lib/email/encoders.py b/Lib/email/encoders.py --- a/Lib/email/encoders.py +++ b/Lib/email/encoders.py @@ -28,7 +28,7 @@ Also, add an appropriate Content-Transfer-Encoding header. """ - orig = msg.get_payload() + orig = msg.get_payload(decode=True) encdata = str(_bencode(orig), 'ascii') msg.set_payload(encdata) msg['Content-Transfer-Encoding'] = 'base64' @@ -40,20 +40,16 @@ Also, add an appropriate Content-Transfer-Encoding header. """ - orig = msg.get_payload() - if isinstance(orig, str): - # If it is a string, the model data may have binary data encoded in via - # surrogateescape. Convert back to bytes so we can CTE encode it. 
- orig = orig.encode('ascii', 'surrogateescape') + orig = msg.get_payload(decode=True) encdata = _qencode(orig) - msg.set_payload(encdata.decode('ascii', 'surrogateescape')) + msg.set_payload(encdata) msg['Content-Transfer-Encoding'] = 'quoted-printable' def encode_7or8bit(msg): """Set the Content-Transfer-Encoding header to 7bit or 8bit.""" - orig = msg.get_payload() + orig = msg.get_payload(decode=True) if orig is None: # There's no payload. For backwards compatibility we use 7bit msg['Content-Transfer-Encoding'] = '7bit' @@ -75,16 +71,8 @@ msg['Content-Transfer-Encoding'] = '8bit' else: msg['Content-Transfer-Encoding'] = '7bit' - if not isinstance(orig, str): - msg.set_payload(orig.decode('ascii', 'surrogateescape')) def encode_noop(msg): """Do nothing.""" - # Well, not quite *nothing*: in Python3 we have to turn bytes into a string - # in our internal surrogateescaped form in order to keep the model - # consistent. - orig = msg.get_payload() - if not isinstance(orig, str): - msg.set_payload(orig.decode('ascii', 'surrogateescape')) diff --git a/Lib/email/message.py b/Lib/email/message.py --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -275,6 +275,8 @@ Optional charset sets the message's default character set. See set_charset() for details. """ + if isinstance(payload, bytes): + payload = payload.decode('ascii', 'surrogateescape') self._payload = payload if charset is not None: self.set_charset(charset) diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -593,6 +593,42 @@ "attachment; filename*=utf-8''Fu%C3%9Fballer%20%5Bfilename%5D.ppt", msg['Content-Disposition']) + def test_binary_quopri_payload(self): + for charset in ('latin-1', 'ascii'): + msg = Message() + msg['content-type'] = 'text/plain; charset=%s' % charset + msg['content-transfer-encoding'] = 'quoted-printable' + msg.set_payload(b'foo=e6=96=87bar') + self.assertEqual( + msg.get_payload(decode=True), + b'foo\xe6\x96\x87bar', + 'get_payload returns wrong result with charset %s.' % charset) + + def test_binary_base64_payload(self): + for charset in ('latin-1', 'ascii'): + msg = Message() + msg['content-type'] = 'text/plain; charset=%s' % charset + msg['content-transfer-encoding'] = 'base64' + msg.set_payload(b'Zm9v5paHYmFy') + self.assertEqual( + msg.get_payload(decode=True), + b'foo\xe6\x96\x87bar', + 'get_payload returns wrong result with charset %s.' % charset) + + def test_binary_uuencode_payload(self): + for charset in ('latin-1', 'ascii'): + for encoding in ('x-uuencode', 'uuencode', 'uue', 'x-uue'): + msg = Message() + msg['content-type'] = 'text/plain; charset=%s' % charset + msg['content-transfer-encoding'] = encoding + msg.set_payload(b"begin 666 -\n)9F]OYI:'8F%R\n \nend\n") + self.assertEqual( + msg.get_payload(decode=True), + b'foo\xe6\x96\x87bar', + str(('get_payload returns wrong result ', + 'with charset {0} and encoding {1}.')).\ + format(charset, encoding)) + def test_add_header_with_name_only_param(self): msg = Message() msg.add_header('Content-Disposition', 'inline', foo_bar=None) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #18324: set_payload now correctly handles binary input. This also + supersedes the previous fixes for #14360, #1717, and #16564. + - Issue #17119: Fixed integer overflows when processing large strings and tuples in the tkinter module. 
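In user-visible terms the change means a bytes payload now round-trips through Message without any manual surrogateescape handling. A minimal sketch mirroring the new tests above (the charset and sample bytes are taken from those tests, the rest is illustrative):

    from email.message import Message

    msg = Message()
    msg['Content-Type'] = 'text/plain; charset=latin-1'
    msg['Content-Transfer-Encoding'] = 'quoted-printable'

    # bytes input is now decoded to the internal str form automatically
    msg.set_payload(b'foo=e6=96=87bar')

    print(msg.get_payload(decode=True))   # b'foo\xe6\x96\x87bar'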
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 03:14:31 2013 From: python-checkins at python.org (r.david.murray) Date: Thu, 22 Aug 2013 03:14:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Merge_=2318324=3A_set=5Fpayload_now_correctly_handles_bi?= =?utf-8?q?nary_input=2E?= Message-ID: <3cL77W5PBgz7Lkb@mail.python.org> http://hg.python.org/cpython/rev/a4afcf93ef7b changeset: 85311:a4afcf93ef7b parent: 85309:ea4130668150 parent: 85310:64e004737837 user: R David Murray date: Wed Aug 21 21:13:51 2013 -0400 summary: Merge #18324: set_payload now correctly handles binary input. files: Lib/email/encoders.py | 20 ++-------- Lib/email/message.py | 2 + Lib/test/test_email/test_email.py | 36 +++++++++++++++++++ Misc/NEWS | 3 + 4 files changed, 45 insertions(+), 16 deletions(-) diff --git a/Lib/email/encoders.py b/Lib/email/encoders.py --- a/Lib/email/encoders.py +++ b/Lib/email/encoders.py @@ -28,7 +28,7 @@ Also, add an appropriate Content-Transfer-Encoding header. """ - orig = msg.get_payload() + orig = msg.get_payload(decode=True) encdata = str(_bencode(orig), 'ascii') msg.set_payload(encdata) msg['Content-Transfer-Encoding'] = 'base64' @@ -40,20 +40,16 @@ Also, add an appropriate Content-Transfer-Encoding header. """ - orig = msg.get_payload() - if isinstance(orig, str): - # If it is a string, the model data may have binary data encoded in via - # surrogateescape. Convert back to bytes so we can CTE encode it. - orig = orig.encode('ascii', 'surrogateescape') + orig = msg.get_payload(decode=True) encdata = _qencode(orig) - msg.set_payload(encdata.decode('ascii', 'surrogateescape')) + msg.set_payload(encdata) msg['Content-Transfer-Encoding'] = 'quoted-printable' def encode_7or8bit(msg): """Set the Content-Transfer-Encoding header to 7bit or 8bit.""" - orig = msg.get_payload() + orig = msg.get_payload(decode=True) if orig is None: # There's no payload. For backwards compatibility we use 7bit msg['Content-Transfer-Encoding'] = '7bit' @@ -75,16 +71,8 @@ msg['Content-Transfer-Encoding'] = '8bit' else: msg['Content-Transfer-Encoding'] = '7bit' - if not isinstance(orig, str): - msg.set_payload(orig.decode('ascii', 'surrogateescape')) def encode_noop(msg): """Do nothing.""" - # Well, not quite *nothing*: in Python3 we have to turn bytes into a string - # in our internal surrogateescaped form in order to keep the model - # consistent. - orig = msg.get_payload() - if not isinstance(orig, str): - msg.set_payload(orig.decode('ascii', 'surrogateescape')) diff --git a/Lib/email/message.py b/Lib/email/message.py --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -303,6 +303,8 @@ Optional charset sets the message's default character set. See set_charset() for details. 
""" + if isinstance(payload, bytes): + payload = payload.decode('ascii', 'surrogateescape') self._payload = payload if charset is not None: self.set_charset(charset) diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -620,6 +620,42 @@ "attachment; filename*=utf-8''Fu%C3%9Fballer%20%5Bfilename%5D.ppt", msg['Content-Disposition']) + def test_binary_quopri_payload(self): + for charset in ('latin-1', 'ascii'): + msg = Message() + msg['content-type'] = 'text/plain; charset=%s' % charset + msg['content-transfer-encoding'] = 'quoted-printable' + msg.set_payload(b'foo=e6=96=87bar') + self.assertEqual( + msg.get_payload(decode=True), + b'foo\xe6\x96\x87bar', + 'get_payload returns wrong result with charset %s.' % charset) + + def test_binary_base64_payload(self): + for charset in ('latin-1', 'ascii'): + msg = Message() + msg['content-type'] = 'text/plain; charset=%s' % charset + msg['content-transfer-encoding'] = 'base64' + msg.set_payload(b'Zm9v5paHYmFy') + self.assertEqual( + msg.get_payload(decode=True), + b'foo\xe6\x96\x87bar', + 'get_payload returns wrong result with charset %s.' % charset) + + def test_binary_uuencode_payload(self): + for charset in ('latin-1', 'ascii'): + for encoding in ('x-uuencode', 'uuencode', 'uue', 'x-uue'): + msg = Message() + msg['content-type'] = 'text/plain; charset=%s' % charset + msg['content-transfer-encoding'] = encoding + msg.set_payload(b"begin 666 -\n)9F]OYI:'8F%R\n \nend\n") + self.assertEqual( + msg.get_payload(decode=True), + b'foo\xe6\x96\x87bar', + str(('get_payload returns wrong result ', + 'with charset {0} and encoding {1}.')).\ + format(charset, encoding)) + def test_add_header_with_name_only_param(self): msg = Message() msg.add_header('Content-Disposition', 'inline', foo_bar=None) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -38,6 +38,9 @@ Library ------- +- Issue #18324: set_payload now correctly handles binary input. This also + supersedes the previous fixes for #14360, #1717, and #16564. + - Issue #18794: Add a fileno() method and a closed attribute to select.devpoll objects. -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Thu Aug 22 06:15:35 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Thu, 22 Aug 2013 06:15:35 +0200 Subject: [Python-checkins] Daily reference leaks (a4afcf93ef7b): sum=0 Message-ID: results for a4afcf93ef7b on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogkCfpIv', '-x'] From python-checkins at python.org Thu Aug 22 12:47:31 2013 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 22 Aug 2013 12:47:31 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_=2318762=3A_Fix_EBAD?= =?utf-8?q?F_error_when_using_forkserver=2E?= Message-ID: <3cLMrg6rCTz7Ljp@mail.python.org> http://hg.python.org/cpython/rev/a94555d5070f changeset: 85312:a94555d5070f user: Richard Oudkerk date: Thu Aug 22 11:38:55 2013 +0100 summary: Issue #18762: Fix EBADF error when using forkserver. 
files: Lib/multiprocessing/forkserver.py | 55 ++++++++---------- 1 files changed, 25 insertions(+), 30 deletions(-) diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py --- a/Lib/multiprocessing/forkserver.py +++ b/Lib/multiprocessing/forkserver.py @@ -23,11 +23,12 @@ MAXFDS_TO_SEND = 256 UNSIGNED_STRUCT = struct.Struct('Q') # large enough for pid_t +_forkserver_address = None +_forkserver_alive_fd = None _inherited_fds = None _lock = threading.Lock() _preload_modules = ['__main__'] - # # Public function # @@ -56,31 +57,15 @@ ''' if len(fds) + 3 >= MAXFDS_TO_SEND: raise ValueError('too many fds') - address, alive_w = process.current_process()._config['forkserver_info'] with socket.socket(socket.AF_UNIX) as client: - client.connect(address) + client.connect(_forkserver_address) parent_r, child_w = util.pipe() child_r, parent_w = util.pipe() - allfds = [child_r, child_w, alive_w] + allfds = [child_r, child_w, _forkserver_alive_fd] allfds += fds try: reduction.sendfds(client, allfds) return parent_r, parent_w - except OSError: - # XXX This is debugging info for Issue #18762 - import fcntl - L = [] - for fd in allfds: - try: - flags = fcntl.fcntl(fd, fcntl.F_GETFL) - except OSError as e: - L.append((fd, e)) - else: - L.append((fd, flags)) - print('*** connect_to_new_process: %r' % L, file=sys.stderr) - os.close(parent_r) - os.close(parent_w) - raise except: os.close(parent_r) os.close(parent_w) @@ -97,12 +82,13 @@ process will just reuse the forkserver started by its parent, so ensure_running() will do nothing. ''' + global _forkserver_address, _forkserver_alive_fd with _lock: - config = process.current_process()._config - if config.get('forkserver_info') is not None: + if _forkserver_alive_fd is not None: return assert all(type(mod) is str for mod in _preload_modules) + config = process.current_process()._config semaphore_tracker_fd = config['semaphore_tracker_fd'] cmd = ('from multiprocessing.forkserver import main; ' + 'main(%d, %d, %r, **%r)') @@ -122,13 +108,20 @@ # all client processes own the write end of the "alive" pipe; # when they all terminate the read end becomes ready. 
- alive_r, alive_w = os.pipe() - config['forkserver_info'] = (address, alive_w) - fds_to_pass = [listener.fileno(), alive_r, semaphore_tracker_fd] - cmd %= (listener.fileno(), alive_r, _preload_modules, data) - exe = spawn.get_executable() - args = [exe] + util._args_from_interpreter_flags() + ['-c', cmd] - pid = util.spawnv_passfds(exe, args, fds_to_pass) + alive_r, alive_w = util.pipe() + try: + fds_to_pass = [listener.fileno(), alive_r, semaphore_tracker_fd] + cmd %= (listener.fileno(), alive_r, _preload_modules, data) + exe = spawn.get_executable() + args = [exe] + util._args_from_interpreter_flags() + ['-c', cmd] + pid = util.spawnv_passfds(exe, args, fds_to_pass) + except: + os.close(alive_w) + raise + finally: + os.close(alive_r) + _forkserver_address = address + _forkserver_alive_fd = alive_w def main(listener_fd, alive_r, preload, main_path=None, sys_path=None): @@ -157,6 +150,8 @@ # ignoring SIGCHLD means no need to reap zombie processes handler = signal.signal(signal.SIGCHLD, signal.SIG_IGN) with socket.socket(socket.AF_UNIX, fileno=listener_fd) as listener: + global _forkserver_address + _forkserver_address = listener.getsockname() readers = [listener, alive_r] while True: @@ -191,7 +186,7 @@ # def _serve_one(s, listener, alive_r, handler): - global _inherited_fds + global _inherited_fds, _forkserver_alive_fd # close unnecessary stuff and reset SIGCHLD handler listener.close() @@ -202,7 +197,7 @@ fds = reduction.recvfds(s, MAXFDS_TO_SEND + 1) s.close() assert len(fds) <= MAXFDS_TO_SEND - child_r, child_w, alive_w, *_inherited_fds = fds + child_r, child_w, _forkserver_alive_fd, *_inherited_fds = fds # send pid to client processes write_unsigned(child_w, os.getpid()) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 12:47:33 2013 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 22 Aug 2013 12:47:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Stop_making_fork_server_ha?= =?utf-8?q?ve_copy_of_semaphore=5Ftracker=5Ffd=2E?= Message-ID: <3cLMrj2lSbz7Lmg@mail.python.org> http://hg.python.org/cpython/rev/b0b224e0d2b5 changeset: 85313:b0b224e0d2b5 user: Richard Oudkerk date: Thu Aug 22 11:38:57 2013 +0100 summary: Stop making fork server have copy of semaphore_tracker_fd. files: Lib/multiprocessing/forkserver.py | 13 +++++---- Lib/multiprocessing/popen_spawn_posix.py | 6 +++- Lib/multiprocessing/popen_spawn_win32.py | 5 ++- Lib/multiprocessing/semaphore_tracker.py | 14 +++++----- Lib/multiprocessing/spawn.py | 15 ++++++----- 5 files changed, 29 insertions(+), 24 deletions(-) diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py --- a/Lib/multiprocessing/forkserver.py +++ b/Lib/multiprocessing/forkserver.py @@ -10,6 +10,7 @@ from . import connection from . import process from . import reduction +from . import semaphore_tracker from . import spawn from . import util @@ -55,13 +56,14 @@ The calling process should write to data_w the pickled preparation and process data. 
''' - if len(fds) + 3 >= MAXFDS_TO_SEND: + if len(fds) + 4 >= MAXFDS_TO_SEND: raise ValueError('too many fds') with socket.socket(socket.AF_UNIX) as client: client.connect(_forkserver_address) parent_r, child_w = util.pipe() child_r, parent_w = util.pipe() - allfds = [child_r, child_w, _forkserver_alive_fd] + allfds = [child_r, child_w, _forkserver_alive_fd, + semaphore_tracker._semaphore_tracker_fd] allfds += fds try: reduction.sendfds(client, allfds) @@ -88,8 +90,6 @@ return assert all(type(mod) is str for mod in _preload_modules) - config = process.current_process()._config - semaphore_tracker_fd = config['semaphore_tracker_fd'] cmd = ('from multiprocessing.forkserver import main; ' + 'main(%d, %d, %r, **%r)') @@ -110,7 +110,7 @@ # when they all terminate the read end becomes ready. alive_r, alive_w = util.pipe() try: - fds_to_pass = [listener.fileno(), alive_r, semaphore_tracker_fd] + fds_to_pass = [listener.fileno(), alive_r] cmd %= (listener.fileno(), alive_r, _preload_modules, data) exe = spawn.get_executable() args = [exe] + util._args_from_interpreter_flags() + ['-c', cmd] @@ -197,7 +197,8 @@ fds = reduction.recvfds(s, MAXFDS_TO_SEND + 1) s.close() assert len(fds) <= MAXFDS_TO_SEND - child_r, child_w, _forkserver_alive_fd, *_inherited_fds = fds + child_r, child_w, _forkserver_alive_fd, stfd, *_inherited_fds = fds + semaphore_tracker._semaphore_tracker_fd = stfd # send pid to client processes write_unsigned(child_w, os.getpid()) diff --git a/Lib/multiprocessing/popen_spawn_posix.py b/Lib/multiprocessing/popen_spawn_posix.py --- a/Lib/multiprocessing/popen_spawn_posix.py +++ b/Lib/multiprocessing/popen_spawn_posix.py @@ -40,7 +40,8 @@ return fd def _launch(self, process_obj): - tracker_fd = current_process()._config['semaphore_tracker_fd'] + from . import semaphore_tracker + tracker_fd = semaphore_tracker._semaphore_tracker_fd self._fds.append(tracker_fd) prep_data = spawn.get_preparation_data(process_obj._name) fp = io.BytesIO() @@ -55,7 +56,8 @@ try: parent_r, child_w = util.pipe() child_r, parent_w = util.pipe() - cmd = spawn.get_command_line() + [str(child_r)] + cmd = spawn.get_command_line(tracker_fd=tracker_fd, + pipe_handle=child_r) self._fds.extend([child_r, child_w]) self.pid = util.spawnv_passfds(spawn.get_executable(), cmd, self._fds) diff --git a/Lib/multiprocessing/popen_spawn_win32.py b/Lib/multiprocessing/popen_spawn_win32.py --- a/Lib/multiprocessing/popen_spawn_win32.py +++ b/Lib/multiprocessing/popen_spawn_win32.py @@ -32,13 +32,14 @@ def __init__(self, process_obj): prep_data = spawn.get_preparation_data(process_obj._name) - cmd = ' '.join('"%s"' % x for x in spawn.get_command_line()) # read end of pipe will be "stolen" by the child process # -- see spawn_main() in spawn.py. rhandle, whandle = _winapi.CreatePipe(None, 0) wfd = msvcrt.open_osfhandle(whandle, 0) - cmd += ' {} {}'.format(os.getpid(), rhandle) + cmd = spawn.get_command_line(parent_pid=os.getpid(), + pipe_handle=rhandle) + cmd = ' '.join('"%s"' % x for x in cmd) with open(wfd, 'wb', closefd=True) as to_child: # start process diff --git a/Lib/multiprocessing/semaphore_tracker.py b/Lib/multiprocessing/semaphore_tracker.py --- a/Lib/multiprocessing/semaphore_tracker.py +++ b/Lib/multiprocessing/semaphore_tracker.py @@ -26,6 +26,7 @@ __all__ = ['ensure_running', 'register', 'unregister'] +_semaphore_tracker_fd = None _lock = threading.Lock() @@ -34,9 +35,9 @@ This can be run from any process. 
Usually a child process will use the semaphore created by its parent.''' + global _semaphore_tracker_fd with _lock: - config = current_process()._config - if config.get('semaphore_tracker_fd') is not None: + if _semaphore_tracker_fd is not None: return fds_to_pass = [] try: @@ -44,7 +45,7 @@ except Exception: pass cmd = 'from multiprocessing.semaphore_tracker import main; main(%d)' - r, semaphore_tracker_fd = util.pipe() + r, w = util.pipe() try: fds_to_pass.append(r) # process will out live us, so no need to wait on pid @@ -53,10 +54,10 @@ args += ['-c', cmd % r] util.spawnv_passfds(exe, args, fds_to_pass) except: - os.close(semaphore_tracker_fd) + os.close(w) raise else: - config['semaphore_tracker_fd'] = semaphore_tracker_fd + _semaphore_tracker_fd = w finally: os.close(r) @@ -77,8 +78,7 @@ # posix guarantees that writes to a pipe of less than PIPE_BUF # bytes are atomic, and that PIPE_BUF >= 512 raise ValueError('name too long') - fd = current_process()._config['semaphore_tracker_fd'] - nbytes = os.write(fd, msg) + nbytes = os.write(_semaphore_tracker_fd, msg) assert nbytes == len(msg) diff --git a/Lib/multiprocessing/spawn.py b/Lib/multiprocessing/spawn.py --- a/Lib/multiprocessing/spawn.py +++ b/Lib/multiprocessing/spawn.py @@ -66,32 +66,33 @@ sys.exit() -def get_command_line(): +def get_command_line(**kwds): ''' Returns prefix of command line used for spawning a child process ''' if getattr(sys, 'frozen', False): return [sys.executable, '--multiprocessing-fork'] else: - prog = 'from multiprocessing.spawn import spawn_main; spawn_main()' + prog = 'from multiprocessing.spawn import spawn_main; spawn_main(%s)' + prog %= ', '.join('%s=%r' % item for item in kwds.items()) opts = util._args_from_interpreter_flags() return [_python_exe] + opts + ['-c', prog, '--multiprocessing-fork'] -def spawn_main(): +def spawn_main(pipe_handle, parent_pid=None, tracker_fd=None): ''' Run code specifed by data received over pipe ''' assert is_forking(sys.argv) - handle = int(sys.argv[-1]) if sys.platform == 'win32': import msvcrt from .reduction import steal_handle - pid = int(sys.argv[-2]) - new_handle = steal_handle(pid, handle) + new_handle = steal_handle(parent_pid, pipe_handle) fd = msvcrt.open_osfhandle(new_handle, os.O_RDONLY) else: - fd = handle + from . import semaphore_tracker + semaphore_tracker._semaphore_tracker_fd = tracker_fd + fd = pipe_handle exitcode = _main(fd) sys.exit(exitcode) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 12:49:55 2013 From: python-checkins at python.org (ronald.oussoren) Date: Thu, 22 Aug 2013 12:49:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?peps=3A_PEP_447=3A_Slight_rewording_o?= =?utf-8?q?f_the_text?= Message-ID: <3cLMvR6sBqz7Lk2@mail.python.org> http://hg.python.org/peps/rev/cc078e71b111 changeset: 5072:cc078e71b111 user: Ronald Oussoren date: Thu Aug 22 12:49:43 2013 +0200 summary: PEP 447: Slight rewording of the text * Adds a clearer description of what would change to the abstract * Small wording tweaks in other parts * Reflow the document, lines with normal text are now shorter than 80 characters (the output from pybench and the links section still contain longer lines) * Don't use TAB characters for indentation. files: pep-0447.txt | 452 ++++++++++++++++++++------------------ 1 files changed, 242 insertions(+), 210 deletions(-) diff --git a/pep-0447.txt b/pep-0447.txt --- a/pep-0447.txt +++ b/pep-0447.txt @@ -18,6 +18,28 @@ an attribute. 
This PEP adds an optional ``__locallookup__`` method to a metaclass that can be used to override this behavior. +That is, the MRO walking loop in ``_PyType_Lookup`` and +``super.__getattribute__`` gets changed from:: + + def lookup(mro_list, name): + for cls in mro_list: + if name in cls.__dict__: + return cls.__dict__ + + return NotFound + +to:: + + def lookup(mro_list, name): + for cls in mro_list: + try: + return cls.__locallookup__(name) + except AttributeError: + pass + + return NotFound + + Rationale ========= @@ -30,7 +52,8 @@ attributes even when looking them up using the `super class`_. The new method affects ``object.__getattribute__`` (and -`PyObject_GenericGetAttr`_) as well for consistency. +`PyObject_GenericGetAttr`_) as well for consistency and to have a single +place to implement dynamic attribute resolution for classes. Background ---------- @@ -52,16 +75,22 @@ ==================================== Both ``super.__getattribute__`` and ``object.__getattribute__`` (or -`PyObject_GenericGetAttr`_ in C code) walk an object's MRO and peek in the -class' ``__dict__`` to look up attributes. A way to affect this lookup is -using a method on the meta class for the type, that by default looks up -the name in the class ``__dict__``. +`PyObject_GenericGetAttr`_ and in particular ``_PyType_Lookup`` in C code) +walk an object's MRO and currently peek in the class' ``__dict__`` to look up +attributes. + +With this proposal both lookup methods no longer peek in the class ``__dict__`` +but call the special method ``__locallookup__``, which is a slot defined +on the metaclass. The default implementation of that method looks +up the name the class ``__dict__``, which means that attribute lookup is +unchanged unless a metatype actually defines the new special method. In Python code -------------- A meta type can define a method ``__locallookup__`` that is called during -attribute resolution by both ``super.__getattribute__`` and ``object.__getattribute``:: +attribute resolution by both ``super.__getattribute__`` +and ``object.__getattribute``:: class MetaType(type): def __locallookup__(cls, name): @@ -70,29 +99,30 @@ except KeyError: raise AttributeError(name) from None -The ``__locallookup__`` method has as its arguments a class and the name of the attribute -that is looked up. It should return the value of the attribute without invoking descriptors, -or raise `AttributeError`_ when the name cannot be found. +The ``__locallookup__`` method has as its arguments a class (which is an +instance of the meta type) and the name of the attribute that is looked up. +It should return the value of the attribute without invoking descriptors, +and should raise `AttributeError`_ when the name cannot be found. -The `type`_ class provides a default implementation for ``__locallookup__``, that -looks up the name in the class dictionary. +The `type`_ class provides a default implementation for ``__locallookup__``, +that looks up the name in the class dictionary. Example usage ............. 
-The code below implements a silly metaclass that redirects attribute lookup to uppercase -versions of names:: +The code below implements a silly metaclass that redirects attribute lookup to +uppercase versions of names:: class UpperCaseAccess (type): def __locallookup__(cls, name): - return cls.__dict__[name.upper()] + return cls.__dict__[name.upper()] class SillyObject (metaclass=UpperCaseAccess): def m(self): - return 42 + return 42 - def M(self): - return "fourtytwo" + def M(self): + return "fourtytwo" obj = SillyObject() assert obj.m() == "fortytwo" @@ -101,44 +131,46 @@ In C code --------- -A new slot ``tp_locallookup`` is added to the ``PyTypeObject`` struct, this slot -corresponds to the ``__locallookup__`` method on `type`_. +A new slot ``tp_locallookup`` is added to the ``PyTypeObject`` struct, this +slot corresponds to the ``__locallookup__`` method on `type`_. The slot has the following prototype:: PyObject* (*locallookupfunc)(PyTypeObject* cls, PyObject* name); -This method should lookup *name* in the namespace of *cls*, without looking at superclasses, -and should not invoke descriptors. The method returns ``NULL`` without setting an exception -when the *name* cannot be found, and returns a new reference otherwise (not a borrowed reference). +This method should lookup *name* in the namespace of *cls*, without looking +at superclasses, and should not invoke descriptors. The method returns ``NULL`` without setting an exception when the *name* cannot be found, and returns a +new reference otherwise (not a borrowed reference). Use of this hook by the interpreter ----------------------------------- -The new method is required for metatypes and as such is defined on `type_`. Both -``super.__getattribute__`` and ``object.__getattribute__``/`PyObject_GenericGetAttr`_ -(through ``_PyType_Lookup``) use the this ``__locallookup__`` method when walking -the MRO. +The new method is required for metatypes and as such is defined on `type_`. +Both ``super.__getattribute__`` and +``object.__getattribute__``/`PyObject_GenericGetAttr`_ +(through ``_PyType_Lookup``) use the this ``__locallookup__`` method when +walking the MRO. Other changes to the implementation ----------------------------------- -The change for `PyObject_GenericGetAttr`_ will be done by changing the private function -``_PyType_Lookup``. This currently returns a borrowed reference, but must return a new -reference when the ``__locallookup__`` method is present. Because of this ``_PyType_Lookup`` -will be renamed to ``_PyType_LookupName``, this will cause compile-time errors for all out-of-tree -users of this private API. +The change for `PyObject_GenericGetAttr`_ will be done by changing the private +function ``_PyType_Lookup``. This currently returns a borrowed reference, but +must return a new reference when the ``__locallookup__`` method is present. +Because of this ``_PyType_Lookup`` will be renamed to ``_PyType_LookupName``, +this will cause compile-time errors for all out-of-tree users of this +private API. -The attribute lookup cache in ``Objects/typeobject.c`` is disabled for classes that have a -metaclass that overrides ``__locallookup__``, because using the cache might not be valid -for such classes. +The attribute lookup cache in ``Objects/typeobject.c`` is disabled for classes +that have a metaclass that overrides ``__locallookup__``, because using the +cache might not be valid for such classes. 
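Before the performance numbers below, a small pure-Python simulation may help make the proposed semantics concrete. This is only a sketch of the lookup loop described above, not the C implementation; the helper names are invented, and on a current interpreter the slot has no special meaning, so the hook is simply looked up here as an ordinary attribute of the metaclass::

    def _default_locallookup(cls, name):
        # Proposed default for type.__locallookup__: peek only in the class
        # __dict__, without invoking descriptors or consulting superclasses.
        try:
            return cls.__dict__[name]
        except KeyError:
            raise AttributeError(name) from None

    def simulated_lookup(obj, name):
        # The modified MRO walk: ask each class's metaclass for the value,
        # falling back to the default when no __locallookup__ is defined.
        for cls in type(obj).__mro__:
            hook = getattr(type(cls), '__locallookup__', None)
            if hook is None:
                hook = _default_locallookup
            try:
                return hook(cls, name)
            except AttributeError:
                continue
        raise AttributeError(name)

Applied to the ``UpperCaseAccess`` example earlier, ``simulated_lookup(SillyObject(), 'm')`` returns the function stored under ``M`` without descriptor invocation, which is the lookup-only behaviour the slot is specified to have.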
Performance impact ------------------ -The pybench output below compares an implementation of this PEP with the regular -source tree, both based on changeset a5681f50bae2, run on an idle machine an -Core i7 processor running Centos 6.4. +The pybench output below compares an implementation of this PEP with the +regular source tree, both based on changeset a5681f50bae2, run on an idle +machine an Core i7 processor running Centos 6.4. Even though the machine was idle there were clear differences between runs, I've seen difference in "minimum time" vary from -0.1% to +1.5%, with simular @@ -146,211 +178,211 @@ :: - ------------------------------------------------------------------------------- - PYBENCH 2.1 - ------------------------------------------------------------------------------- - * using CPython 3.4.0a0 (default, Jul 29 2013, 13:01:34) [GCC 4.4.7 20120313 (Red Hat 4.4.7-3)] - * disabled garbage collection - * system check interval set to maximum: 2147483647 - * using timer: time.perf_counter - * timer: resolution=1e-09, implementation=clock_gettime(CLOCK_MONOTONIC) + ------------------------------------------------------------------------------- + PYBENCH 2.1 + ------------------------------------------------------------------------------- + * using CPython 3.4.0a0 (default, Jul 29 2013, 13:01:34) [GCC 4.4.7 20120313 (Red Hat 4.4.7-3)] + * disabled garbage collection + * system check interval set to maximum: 2147483647 + * using timer: time.perf_counter + * timer: resolution=1e-09, implementation=clock_gettime(CLOCK_MONOTONIC) - ------------------------------------------------------------------------------- - Benchmark: pep447.pybench - ------------------------------------------------------------------------------- + ------------------------------------------------------------------------------- + Benchmark: pep447.pybench + ------------------------------------------------------------------------------- - Rounds: 10 - Warp: 10 - Timer: time.perf_counter + Rounds: 10 + Warp: 10 + Timer: time.perf_counter - Machine Details: - Platform ID: Linux-2.6.32-358.114.1.openstack.el6.x86_64-x86_64-with-centos-6.4-Final - Processor: x86_64 + Machine Details: + Platform ID: Linux-2.6.32-358.114.1.openstack.el6.x86_64-x86_64-with-centos-6.4-Final + Processor: x86_64 - Python: - Implementation: CPython - Executable: /tmp/default-pep447/bin/python3 - Version: 3.4.0a0 - Compiler: GCC 4.4.7 20120313 (Red Hat 4.4.7-3) - Bits: 64bit - Build: Jul 29 2013 14:09:12 (#default) - Unicode: UCS4 + Python: + Implementation: CPython + Executable: /tmp/default-pep447/bin/python3 + Version: 3.4.0a0 + Compiler: GCC 4.4.7 20120313 (Red Hat 4.4.7-3) + Bits: 64bit + Build: Jul 29 2013 14:09:12 (#default) + Unicode: UCS4 - ------------------------------------------------------------------------------- - Comparing with: default.pybench - ------------------------------------------------------------------------------- + ------------------------------------------------------------------------------- + Comparing with: default.pybench + ------------------------------------------------------------------------------- - Rounds: 10 - Warp: 10 - Timer: time.perf_counter + Rounds: 10 + Warp: 10 + Timer: time.perf_counter - Machine Details: - Platform ID: Linux-2.6.32-358.114.1.openstack.el6.x86_64-x86_64-with-centos-6.4-Final - Processor: x86_64 + Machine Details: + Platform ID: Linux-2.6.32-358.114.1.openstack.el6.x86_64-x86_64-with-centos-6.4-Final + Processor: x86_64 - Python: - Implementation: CPython - Executable: 
/tmp/default/bin/python3 - Version: 3.4.0a0 - Compiler: GCC 4.4.7 20120313 (Red Hat 4.4.7-3) - Bits: 64bit - Build: Jul 29 2013 13:01:34 (#default) - Unicode: UCS4 + Python: + Implementation: CPython + Executable: /tmp/default/bin/python3 + Version: 3.4.0a0 + Compiler: GCC 4.4.7 20120313 (Red Hat 4.4.7-3) + Bits: 64bit + Build: Jul 29 2013 13:01:34 (#default) + Unicode: UCS4 - Test minimum run-time average run-time - this other diff this other diff - ------------------------------------------------------------------------------- - BuiltinFunctionCalls: 45ms 44ms +1.3% 45ms 44ms +1.3% - BuiltinMethodLookup: 26ms 27ms -2.4% 27ms 27ms -2.2% - CompareFloats: 33ms 34ms -0.7% 33ms 34ms -1.1% - CompareFloatsIntegers: 66ms 67ms -0.9% 66ms 67ms -0.8% - CompareIntegers: 51ms 50ms +0.9% 51ms 50ms +0.8% - CompareInternedStrings: 34ms 33ms +0.4% 34ms 34ms -0.4% - CompareLongs: 29ms 29ms -0.1% 29ms 29ms -0.0% - CompareStrings: 43ms 44ms -1.8% 44ms 44ms -1.8% - ComplexPythonFunctionCalls: 44ms 42ms +3.9% 44ms 42ms +4.1% - ConcatStrings: 33ms 33ms -0.4% 33ms 33ms -1.0% - CreateInstances: 47ms 48ms -2.9% 47ms 49ms -3.4% - CreateNewInstances: 35ms 36ms -2.5% 36ms 36ms -2.5% - CreateStringsWithConcat: 69ms 70ms -0.7% 69ms 70ms -0.9% - DictCreation: 52ms 50ms +3.1% 52ms 50ms +3.0% - DictWithFloatKeys: 40ms 44ms -10.1% 43ms 45ms -5.8% - DictWithIntegerKeys: 32ms 36ms -11.2% 35ms 37ms -4.6% - DictWithStringKeys: 29ms 34ms -15.7% 35ms 40ms -11.0% - ForLoops: 30ms 29ms +2.2% 30ms 29ms +2.2% - IfThenElse: 38ms 41ms -6.7% 38ms 41ms -6.9% - ListSlicing: 36ms 36ms -0.7% 36ms 37ms -1.3% - NestedForLoops: 43ms 45ms -3.1% 43ms 45ms -3.2% - NestedListComprehensions: 39ms 40ms -1.7% 39ms 40ms -2.1% - NormalClassAttribute: 86ms 82ms +5.1% 86ms 82ms +5.0% - NormalInstanceAttribute: 42ms 42ms +0.3% 42ms 42ms +0.0% - PythonFunctionCalls: 39ms 38ms +3.5% 39ms 38ms +2.8% - PythonMethodCalls: 51ms 49ms +3.0% 51ms 50ms +2.8% - Recursion: 67ms 68ms -1.4% 67ms 68ms -1.4% - SecondImport: 41ms 36ms +12.5% 41ms 36ms +12.6% - SecondPackageImport: 45ms 40ms +13.1% 45ms 40ms +13.2% - SecondSubmoduleImport: 92ms 95ms -2.4% 95ms 98ms -3.6% - SimpleComplexArithmetic: 28ms 28ms -0.1% 28ms 28ms -0.2% - SimpleDictManipulation: 57ms 57ms -1.0% 57ms 58ms -1.0% - SimpleFloatArithmetic: 29ms 28ms +4.7% 29ms 28ms +4.9% - SimpleIntFloatArithmetic: 37ms 41ms -8.5% 37ms 41ms -8.7% - SimpleIntegerArithmetic: 37ms 41ms -9.4% 37ms 42ms -10.2% - SimpleListComprehensions: 33ms 33ms -1.9% 33ms 34ms -2.9% - SimpleListManipulation: 28ms 30ms -4.3% 29ms 30ms -4.1% - SimpleLongArithmetic: 26ms 26ms +0.5% 26ms 26ms +0.5% - SmallLists: 40ms 40ms +0.1% 40ms 40ms +0.1% - SmallTuples: 46ms 47ms -2.4% 46ms 48ms -3.0% - SpecialClassAttribute: 126ms 120ms +4.7% 126ms 121ms +4.4% - SpecialInstanceAttribute: 42ms 42ms +0.6% 42ms 42ms +0.8% - StringMappings: 94ms 91ms +3.9% 94ms 91ms +3.8% - StringPredicates: 48ms 49ms -1.7% 48ms 49ms -2.1% - StringSlicing: 45ms 45ms +1.4% 46ms 45ms +1.5% - TryExcept: 23ms 22ms +4.9% 23ms 22ms +4.8% - TryFinally: 32ms 32ms -0.1% 32ms 32ms +0.1% - TryRaiseExcept: 17ms 17ms +0.9% 17ms 17ms +0.5% - TupleSlicing: 49ms 48ms +1.1% 49ms 49ms +1.0% - WithFinally: 48ms 47ms +2.3% 48ms 47ms +2.4% - WithRaiseExcept: 45ms 44ms +0.8% 45ms 45ms +0.5% - ------------------------------------------------------------------------------- - Totals: 2284ms 2287ms -0.1% 2306ms 2308ms -0.1% + Test minimum run-time average run-time + this other diff this other diff + ------------------------------------------------------------------------------- + 
BuiltinFunctionCalls: 45ms 44ms +1.3% 45ms 44ms +1.3% + BuiltinMethodLookup: 26ms 27ms -2.4% 27ms 27ms -2.2% + CompareFloats: 33ms 34ms -0.7% 33ms 34ms -1.1% + CompareFloatsIntegers: 66ms 67ms -0.9% 66ms 67ms -0.8% + CompareIntegers: 51ms 50ms +0.9% 51ms 50ms +0.8% + CompareInternedStrings: 34ms 33ms +0.4% 34ms 34ms -0.4% + CompareLongs: 29ms 29ms -0.1% 29ms 29ms -0.0% + CompareStrings: 43ms 44ms -1.8% 44ms 44ms -1.8% + ComplexPythonFunctionCalls: 44ms 42ms +3.9% 44ms 42ms +4.1% + ConcatStrings: 33ms 33ms -0.4% 33ms 33ms -1.0% + CreateInstances: 47ms 48ms -2.9% 47ms 49ms -3.4% + CreateNewInstances: 35ms 36ms -2.5% 36ms 36ms -2.5% + CreateStringsWithConcat: 69ms 70ms -0.7% 69ms 70ms -0.9% + DictCreation: 52ms 50ms +3.1% 52ms 50ms +3.0% + DictWithFloatKeys: 40ms 44ms -10.1% 43ms 45ms -5.8% + DictWithIntegerKeys: 32ms 36ms -11.2% 35ms 37ms -4.6% + DictWithStringKeys: 29ms 34ms -15.7% 35ms 40ms -11.0% + ForLoops: 30ms 29ms +2.2% 30ms 29ms +2.2% + IfThenElse: 38ms 41ms -6.7% 38ms 41ms -6.9% + ListSlicing: 36ms 36ms -0.7% 36ms 37ms -1.3% + NestedForLoops: 43ms 45ms -3.1% 43ms 45ms -3.2% + NestedListComprehensions: 39ms 40ms -1.7% 39ms 40ms -2.1% + NormalClassAttribute: 86ms 82ms +5.1% 86ms 82ms +5.0% + NormalInstanceAttribute: 42ms 42ms +0.3% 42ms 42ms +0.0% + PythonFunctionCalls: 39ms 38ms +3.5% 39ms 38ms +2.8% + PythonMethodCalls: 51ms 49ms +3.0% 51ms 50ms +2.8% + Recursion: 67ms 68ms -1.4% 67ms 68ms -1.4% + SecondImport: 41ms 36ms +12.5% 41ms 36ms +12.6% + SecondPackageImport: 45ms 40ms +13.1% 45ms 40ms +13.2% + SecondSubmoduleImport: 92ms 95ms -2.4% 95ms 98ms -3.6% + SimpleComplexArithmetic: 28ms 28ms -0.1% 28ms 28ms -0.2% + SimpleDictManipulation: 57ms 57ms -1.0% 57ms 58ms -1.0% + SimpleFloatArithmetic: 29ms 28ms +4.7% 29ms 28ms +4.9% + SimpleIntFloatArithmetic: 37ms 41ms -8.5% 37ms 41ms -8.7% + SimpleIntegerArithmetic: 37ms 41ms -9.4% 37ms 42ms -10.2% + SimpleListComprehensions: 33ms 33ms -1.9% 33ms 34ms -2.9% + SimpleListManipulation: 28ms 30ms -4.3% 29ms 30ms -4.1% + SimpleLongArithmetic: 26ms 26ms +0.5% 26ms 26ms +0.5% + SmallLists: 40ms 40ms +0.1% 40ms 40ms +0.1% + SmallTuples: 46ms 47ms -2.4% 46ms 48ms -3.0% + SpecialClassAttribute: 126ms 120ms +4.7% 126ms 121ms +4.4% + SpecialInstanceAttribute: 42ms 42ms +0.6% 42ms 42ms +0.8% + StringMappings: 94ms 91ms +3.9% 94ms 91ms +3.8% + StringPredicates: 48ms 49ms -1.7% 48ms 49ms -2.1% + StringSlicing: 45ms 45ms +1.4% 46ms 45ms +1.5% + TryExcept: 23ms 22ms +4.9% 23ms 22ms +4.8% + TryFinally: 32ms 32ms -0.1% 32ms 32ms +0.1% + TryRaiseExcept: 17ms 17ms +0.9% 17ms 17ms +0.5% + TupleSlicing: 49ms 48ms +1.1% 49ms 49ms +1.0% + WithFinally: 48ms 47ms +2.3% 48ms 47ms +2.4% + WithRaiseExcept: 45ms 44ms +0.8% 45ms 45ms +0.5% + ------------------------------------------------------------------------------- + Totals: 2284ms 2287ms -0.1% 2306ms 2308ms -0.1% - (this=pep447.pybench, other=default.pybench) + (this=pep447.pybench, other=default.pybench) A run of the benchmark suite (with option "-b 2n3") also seems to indicate that the performance impact is minimal:: - Report on Linux fangorn.local 2.6.32-358.114.1.openstack.el6.x86_64 #1 SMP Wed Jul 3 02:11:25 EDT 2013 x86_64 x86_64 - Total CPU cores: 8 + Report on Linux fangorn.local 2.6.32-358.114.1.openstack.el6.x86_64 #1 SMP Wed Jul 3 02:11:25 EDT 2013 x86_64 x86_64 + Total CPU cores: 8 - ### call_method_slots ### - Min: 0.304120 -> 0.282791: 1.08x faster - Avg: 0.304394 -> 0.282906: 1.08x faster - Significant (t=2329.92) - Stddev: 0.00016 -> 0.00004: 4.1814x smaller + ### call_method_slots ### + Min: 
0.304120 -> 0.282791: 1.08x faster + Avg: 0.304394 -> 0.282906: 1.08x faster + Significant (t=2329.92) + Stddev: 0.00016 -> 0.00004: 4.1814x smaller - ### call_simple ### - Min: 0.249268 -> 0.221175: 1.13x faster - Avg: 0.249789 -> 0.221387: 1.13x faster - Significant (t=2770.11) - Stddev: 0.00012 -> 0.00013: 1.1101x larger + ### call_simple ### + Min: 0.249268 -> 0.221175: 1.13x faster + Avg: 0.249789 -> 0.221387: 1.13x faster + Significant (t=2770.11) + Stddev: 0.00012 -> 0.00013: 1.1101x larger - ### django_v2 ### - Min: 0.632590 -> 0.601519: 1.05x faster - Avg: 0.635085 -> 0.602653: 1.05x faster - Significant (t=321.32) - Stddev: 0.00087 -> 0.00051: 1.6933x smaller + ### django_v2 ### + Min: 0.632590 -> 0.601519: 1.05x faster + Avg: 0.635085 -> 0.602653: 1.05x faster + Significant (t=321.32) + Stddev: 0.00087 -> 0.00051: 1.6933x smaller - ### fannkuch ### - Min: 1.033181 -> 0.999779: 1.03x faster - Avg: 1.036457 -> 1.001840: 1.03x faster - Significant (t=260.31) - Stddev: 0.00113 -> 0.00070: 1.6112x smaller + ### fannkuch ### + Min: 1.033181 -> 0.999779: 1.03x faster + Avg: 1.036457 -> 1.001840: 1.03x faster + Significant (t=260.31) + Stddev: 0.00113 -> 0.00070: 1.6112x smaller - ### go ### - Min: 0.526714 -> 0.544428: 1.03x slower - Avg: 0.529649 -> 0.547626: 1.03x slower - Significant (t=-93.32) - Stddev: 0.00136 -> 0.00136: 1.0028x smaller + ### go ### + Min: 0.526714 -> 0.544428: 1.03x slower + Avg: 0.529649 -> 0.547626: 1.03x slower + Significant (t=-93.32) + Stddev: 0.00136 -> 0.00136: 1.0028x smaller - ### iterative_count ### - Min: 0.109748 -> 0.116513: 1.06x slower - Avg: 0.109816 -> 0.117202: 1.07x slower - Significant (t=-357.08) - Stddev: 0.00008 -> 0.00019: 2.3664x larger + ### iterative_count ### + Min: 0.109748 -> 0.116513: 1.06x slower + Avg: 0.109816 -> 0.117202: 1.07x slower + Significant (t=-357.08) + Stddev: 0.00008 -> 0.00019: 2.3664x larger - ### json_dump_v2 ### - Min: 2.554462 -> 2.609141: 1.02x slower - Avg: 2.564472 -> 2.620013: 1.02x slower - Significant (t=-76.93) - Stddev: 0.00538 -> 0.00481: 1.1194x smaller + ### json_dump_v2 ### + Min: 2.554462 -> 2.609141: 1.02x slower + Avg: 2.564472 -> 2.620013: 1.02x slower + Significant (t=-76.93) + Stddev: 0.00538 -> 0.00481: 1.1194x smaller - ### meteor_contest ### - Min: 0.196336 -> 0.191925: 1.02x faster - Avg: 0.196878 -> 0.192698: 1.02x faster - Significant (t=61.86) - Stddev: 0.00053 -> 0.00041: 1.2925x smaller + ### meteor_contest ### + Min: 0.196336 -> 0.191925: 1.02x faster + Avg: 0.196878 -> 0.192698: 1.02x faster + Significant (t=61.86) + Stddev: 0.00053 -> 0.00041: 1.2925x smaller - ### nbody ### - Min: 0.228039 -> 0.235551: 1.03x slower - Avg: 0.228857 -> 0.236052: 1.03x slower - Significant (t=-54.15) - Stddev: 0.00130 -> 0.00029: 4.4810x smaller + ### nbody ### + Min: 0.228039 -> 0.235551: 1.03x slower + Avg: 0.228857 -> 0.236052: 1.03x slower + Significant (t=-54.15) + Stddev: 0.00130 -> 0.00029: 4.4810x smaller - ### pathlib ### - Min: 0.108501 -> 0.105339: 1.03x faster - Avg: 0.109084 -> 0.105619: 1.03x faster - Significant (t=311.08) - Stddev: 0.00022 -> 0.00011: 1.9314x smaller + ### pathlib ### + Min: 0.108501 -> 0.105339: 1.03x faster + Avg: 0.109084 -> 0.105619: 1.03x faster + Significant (t=311.08) + Stddev: 0.00022 -> 0.00011: 1.9314x smaller - ### regex_effbot ### - Min: 0.057905 -> 0.056447: 1.03x faster - Avg: 0.058055 -> 0.056760: 1.02x faster - Significant (t=79.22) - Stddev: 0.00006 -> 0.00015: 2.7741x larger + ### regex_effbot ### + Min: 0.057905 -> 0.056447: 1.03x faster + Avg: 
0.058055 -> 0.056760: 1.02x faster + Significant (t=79.22) + Stddev: 0.00006 -> 0.00015: 2.7741x larger - ### silent_logging ### - Min: 0.070810 -> 0.072436: 1.02x slower - Avg: 0.070899 -> 0.072609: 1.02x slower - Significant (t=-191.59) - Stddev: 0.00004 -> 0.00008: 2.2640x larger + ### silent_logging ### + Min: 0.070810 -> 0.072436: 1.02x slower + Avg: 0.070899 -> 0.072609: 1.02x slower + Significant (t=-191.59) + Stddev: 0.00004 -> 0.00008: 2.2640x larger - ### spectral_norm ### - Min: 0.290255 -> 0.299286: 1.03x slower - Avg: 0.290335 -> 0.299541: 1.03x slower - Significant (t=-572.10) - Stddev: 0.00005 -> 0.00015: 2.8547x larger + ### spectral_norm ### + Min: 0.290255 -> 0.299286: 1.03x slower + Avg: 0.290335 -> 0.299541: 1.03x slower + Significant (t=-572.10) + Stddev: 0.00005 -> 0.00015: 2.8547x larger - ### threaded_count ### - Min: 0.107215 -> 0.115206: 1.07x slower - Avg: 0.107488 -> 0.115996: 1.08x slower - Significant (t=-109.39) - Stddev: 0.00016 -> 0.00076: 4.8665x larger + ### threaded_count ### + Min: 0.107215 -> 0.115206: 1.07x slower + Avg: 0.107488 -> 0.115996: 1.08x slower + Significant (t=-109.39) + Stddev: 0.00016 -> 0.00076: 4.8665x larger - The following not significant results are hidden, use -v to show them: - call_method, call_method_unknown, chaos, fastpickle, fastunpickle, float, formatted_logging, hexiom2, json_load, normal_startup, nqueens, pidigits, raytrace, regex_compile, regex_v8, richards, simple_logging, startup_nosite, telco, unpack_sequence. + The following not significant results are hidden, use -v to show them: + call_method, call_method_unknown, chaos, fastpickle, fastunpickle, float, formatted_logging, hexiom2, json_load, normal_startup, nqueens, pidigits, raytrace, regex_compile, regex_v8, richards, simple_logging, startup_nosite, telco, unpack_sequence. Alternative proposals -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Aug 22 13:24:13 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 22 Aug 2013 13:24:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzQ3?= =?utf-8?q?=3A_Use_a_parent_atfork_handler_instead_of_a_child_atfork_handl?= =?utf-8?b?ZXIu?= Message-ID: <3cLNg15cbSz7LkB@mail.python.org> http://hg.python.org/cpython/rev/39c9dbed3aa1 changeset: 85314:39c9dbed3aa1 branch: 3.3 parent: 85310:64e004737837 user: Christian Heimes date: Thu Aug 22 13:19:48 2013 +0200 summary: Issue #18747: Use a parent atfork handler instead of a child atfork handler. fork() is suppose to be async-signal safe but the handler calls unsafe functions. A parent handler mitigates the issue. files: Modules/_ssl.c | 21 +++++++++------------ 1 files changed, 9 insertions(+), 12 deletions(-) diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -2585,23 +2585,25 @@ /* Seed OpenSSL's PRNG at fork(), http://bugs.python.org/issue18747 * - * The child handler seeds the PRNG from pseudo-random data like pid, the - * current time (nanoseconds, miliseconds or seconds) and an uninitialized - * array. The array contains stack variables that are impossible to predict + * The parent handler seeds the PRNG from pseudo-random data like pid, the + * current time (miliseconds or seconds) and an uninitialized arry. + * The array contains stack variables that are impossible to predict * on most systems, e.g. function return address (subject to ASLR), the * stack protection canary and automatic variables. 
* The code is inspired by Apache's ssl_rand_seed() function. * * Note: * The code uses pthread_atfork() until Python has a proper atfork API. The - * handlers are not removed from the child process. + * handlers are not removed from the child process. A parent handler is used + * instead of a child handler because fork() is suppose to be async-signal + * safe but the handler calls unsafe functions. */ #if defined(HAVE_PTHREAD_ATFORK) && defined(WITH_THREAD) #define PYSSL_RAND_ATFORK 1 static void -PySSL_RAND_atfork_child(void) +PySSL_RAND_atfork_parent(void) { struct { char stack[128]; /* uninitialized (!) stack data, 128 is an @@ -2615,11 +2617,6 @@ #endif seed.pid = getpid(); _PyTime_gettimeofday(&(seed.tp)); - -#if 0 - fprintf(stderr, "PySSL_RAND_atfork_child() seeds %i bytes in pid %i\n", - (int)sizeof(seed), seed.pid); -#endif RAND_add((unsigned char *)&seed, sizeof(seed), 0.0); } @@ -2633,8 +2630,8 @@ return 0; retval = pthread_atfork(NULL, /* prepare */ - NULL, /* parent */ - PySSL_RAND_atfork_child); /* child */ + PySSL_RAND_atfork_parent, /* parent */ + NULL); /* child */ if (retval != 0) { PyErr_SetFromErrno(PyExc_OSError); return -1; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 13:24:15 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 22 Aug 2013 13:24:15 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318747=3A_Use_a_parent_atfork_handler_instead_of?= =?utf-8?q?_a_child_atfork_handler=2E?= Message-ID: <3cLNg30RD6z7LlM@mail.python.org> http://hg.python.org/cpython/rev/25e05147d662 changeset: 85315:25e05147d662 parent: 85313:b0b224e0d2b5 parent: 85314:39c9dbed3aa1 user: Christian Heimes date: Thu Aug 22 13:19:56 2013 +0200 summary: Issue #18747: Use a parent atfork handler instead of a child atfork handler. fork() is suppose to be async-signal safe but the handler calls unsafe functions. A parent handler mitigates the issue. files: Modules/_ssl.c | 21 +++++++++------------ 1 files changed, 9 insertions(+), 12 deletions(-) diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -2943,23 +2943,25 @@ /* Seed OpenSSL's PRNG at fork(), http://bugs.python.org/issue18747 * - * The child handler seeds the PRNG from pseudo-random data like pid, the - * current time (nanoseconds, miliseconds or seconds) and an uninitialized - * array. The array contains stack variables that are impossible to predict + * The parent handler seeds the PRNG from pseudo-random data like pid, the + * current time (miliseconds or seconds) and an uninitialized arry. + * The array contains stack variables that are impossible to predict * on most systems, e.g. function return address (subject to ASLR), the * stack protection canary and automatic variables. * The code is inspired by Apache's ssl_rand_seed() function. * * Note: * The code uses pthread_atfork() until Python has a proper atfork API. The - * handlers are not removed from the child process. + * handlers are not removed from the child process. A parent handler is used + * instead of a child handler because fork() is suppose to be async-signal + * safe but the handler calls unsafe functions. */ #if defined(HAVE_PTHREAD_ATFORK) && defined(WITH_THREAD) #define PYSSL_RAND_ATFORK 1 static void -PySSL_RAND_atfork_child(void) +PySSL_RAND_atfork_parent(void) { struct { char stack[128]; /* uninitialized (!) 
stack data, 128 is an @@ -2973,11 +2975,6 @@ #endif seed.pid = getpid(); _PyTime_gettimeofday(&(seed.tp)); - -#if 0 - fprintf(stderr, "PySSL_RAND_atfork_child() seeds %i bytes in pid %i\n", - (int)sizeof(seed), seed.pid); -#endif RAND_add((unsigned char *)&seed, sizeof(seed), 0.0); } @@ -2991,8 +2988,8 @@ return 0; retval = pthread_atfork(NULL, /* prepare */ - NULL, /* parent */ - PySSL_RAND_atfork_child); /* child */ + PySSL_RAND_atfork_parent, /* parent */ + NULL); /* child */ if (retval != 0) { PyErr_SetFromErrno(PyExc_OSError); return -1; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 13:24:16 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 22 Aug 2013 13:24:16 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzQ3?= =?utf-8?q?=3A_Use_a_parent_atfork_handler_instead_of_a_child_atfork_handl?= =?utf-8?b?ZXIu?= Message-ID: <3cLNg42HQtz7LkN@mail.python.org> http://hg.python.org/cpython/rev/d542f905081a changeset: 85316:d542f905081a branch: 2.7 parent: 85304:48de8df194d9 user: Christian Heimes date: Thu Aug 22 13:19:48 2013 +0200 summary: Issue #18747: Use a parent atfork handler instead of a child atfork handler. fork() is suppose to be async-signal safe but the handler calls unsafe functions. A parent handler mitigates the issue. files: Modules/_ssl.c | 21 +++++++++------------ 1 files changed, 9 insertions(+), 12 deletions(-) diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -1628,23 +1628,25 @@ /* Seed OpenSSL's PRNG at fork(), http://bugs.python.org/issue18747 * - * The child handler seeds the PRNG from pseudo-random data like pid, the - * current time (nanoseconds, miliseconds or seconds) and an uninitialized - * array. The array contains stack variables that are impossible to predict + * The parent handler seeds the PRNG from pseudo-random data like pid, the + * current time (miliseconds or seconds) and an uninitialized arry. + * The array contains stack variables that are impossible to predict * on most systems, e.g. function return address (subject to ASLR), the * stack protection canary and automatic variables. * The code is inspired by Apache's ssl_rand_seed() function. * * Note: * The code uses pthread_atfork() until Python has a proper atfork API. The - * handlers are not removed from the child process. + * handlers are not removed from the child process. A parent handler is used + * instead of a child handler because fork() is suppose to be async-signal + * safe but the handler calls unsafe functions. */ #if defined(HAVE_PTHREAD_ATFORK) && defined(WITH_THREAD) #define PYSSL_RAND_ATFORK 1 static void -PySSL_RAND_atfork_child(void) +PySSL_RAND_atfork_parent(void) { struct { char stack[128]; /* uninitialized (!) 
stack data, 128 is an @@ -1658,11 +1660,6 @@ #endif seed.pid = getpid(); seed.time = time(NULL); - -#if 0 - fprintf(stderr, "PySSL_RAND_atfork_child() seeds %i bytes in pid %i\n", - (int)sizeof(seed), seed.pid); -#endif RAND_add((unsigned char *)&seed, sizeof(seed), 0.0); } @@ -1676,8 +1673,8 @@ return 0; retval = pthread_atfork(NULL, /* prepare */ - NULL, /* parent */ - PySSL_RAND_atfork_child); /* child */ + PySSL_RAND_atfork_parent, /* parent */ + NULL); /* child */ if (retval != 0) { PyErr_SetFromErrno(PyExc_OSError); return -1; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 13:24:17 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 22 Aug 2013 13:24:17 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzQ3?= =?utf-8?q?=3A_Update_Misc/NEWS_to_reflect_the_latest_changeset=2E?= Message-ID: <3cLNg54BnFz7Llv@mail.python.org> http://hg.python.org/cpython/rev/6ec43643c54f changeset: 85317:6ec43643c54f branch: 3.3 parent: 85314:39c9dbed3aa1 user: Christian Heimes date: Thu Aug 22 13:22:37 2013 +0200 summary: Issue #18747: Update Misc/NEWS to reflect the latest changeset. files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -73,7 +73,7 @@ in the tkinter module. - Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. - A pthread_atfork() child handler is used to seeded the PRNG with pid, time + A pthread_atfork() parent handler is used to seeded the PRNG with pid, time and some stack data. - Issue #8865: Concurrent invocation of select.poll.poll() now raises a -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 13:24:18 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 22 Aug 2013 13:24:18 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2318747=3A_Update_Misc/NEWS_to_reflect_the_latest?= =?utf-8?q?_changeset=2E?= Message-ID: <3cLNg663kjz7Lnc@mail.python.org> http://hg.python.org/cpython/rev/956261a143eb changeset: 85318:956261a143eb parent: 85315:25e05147d662 parent: 85317:6ec43643c54f user: Christian Heimes date: Thu Aug 22 13:22:46 2013 +0200 summary: Issue #18747: Update Misc/NEWS to reflect the latest changeset. files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -48,7 +48,7 @@ in the tkinter module. - Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. - A pthread_atfork() child handler is used to seeded the PRNG with pid, time + A pthread_atfork() parent handler is used to seeded the PRNG with pid, time and some stack data. - Issue #8865: Concurrent invocation of select.poll.poll() now raises a -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 13:24:20 2013 From: python-checkins at python.org (christian.heimes) Date: Thu, 22 Aug 2013 13:24:20 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE4NzQ3?= =?utf-8?q?=3A_Update_Misc/NEWS_to_reflect_the_latest_changeset=2E?= Message-ID: <3cLNg80xqVz7LlD@mail.python.org> http://hg.python.org/cpython/rev/14490ced507e changeset: 85319:14490ced507e branch: 2.7 parent: 85316:d542f905081a user: Christian Heimes date: Thu Aug 22 13:22:37 2013 +0200 summary: Issue #18747: Update Misc/NEWS to reflect the latest changeset. 
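As an aside on the handler placement chosen in these #18747 commits, the same parent-side re-seeding can be written at the Python level once an atfork API exists. The sketch below is an anachronism relative to the 2.7/3.3 branches patched above (os.register_at_fork() only arrived in later releases) and is purely illustrative:

    import os
    import ssl
    import time

    def _reseed_parent():
        # Mix the pid and current time into OpenSSL's PRNG after fork(),
        # on the parent side only; doing this work in the child is what the
        # commits above avoid, because fork() is expected to be
        # async-signal-safe while the handler calls unsafe functions.
        seed = ('%d/%f' % (os.getpid(), time.time())).encode()
        ssl.RAND_add(seed, 0.0)

    os.register_at_fork(after_in_parent=_reseed_parent)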
files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -39,7 +39,7 @@ module will be called in a deterministic order. - Issue #18747: Re-seed OpenSSL's pseudo-random number generator after fork. - A pthread_atfork() child handler is used to seeded the PRNG with pid, time + A pthread_atfork() parent handler is used to seeded the PRNG with pid, time and some stack data. - Issue #8865: Concurrent invocation of select.poll.poll() now raises a -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 14:05:36 2013 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 22 Aug 2013 14:05:36 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fix_compilation_of_select_?= =?utf-8?q?module_on_Solaris=2E?= Message-ID: <3cLPZm2Bh6z7LjM@mail.python.org> http://hg.python.org/cpython/rev/79e5be70221b changeset: 85320:79e5be70221b parent: 85318:956261a143eb user: Richard Oudkerk date: Thu Aug 22 13:04:23 2013 +0100 summary: Fix compilation of select module on Solaris. files: Modules/selectmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -1023,7 +1023,7 @@ } static int -devpoll_internal_close(pyEpoll_Object *self) +devpoll_internal_close(devpollObject *self) { int save_errno = 0; if (self->fd_devpoll >= 0) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 14:32:27 2013 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 22 Aug 2013 14:32:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Move_definition_of_devpoll?= =?utf-8?b?X2ludGVybmFsX2Nsb3NlKCkgYmVmb3JlIGRldnBvbGxfY2xvc2UoKS4=?= Message-ID: <3cLQ9l5zf2z7LmY@mail.python.org> http://hg.python.org/cpython/rev/90f38db8f6ef changeset: 85321:90f38db8f6ef user: Richard Oudkerk date: Thu Aug 22 13:31:15 2013 +0100 summary: Move definition of devpoll_internal_close() before devpoll_close(). 
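For orientation, the object whose close logic is being reshuffled here is the Solaris /dev/poll wrapper. A guarded usage sketch (the type only exists on Solaris builds; fileno() and the closed attribute are the additions from issue #18794 noted earlier in this digest):

    import select

    if hasattr(select, 'devpoll'):       # Solaris-only
        dp = select.devpoll()
        dp.register(0, select.POLLIN)    # watch stdin for readability
        print(dp.fileno(), dp.closed)    # the new introspection helpers
        dp.close()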
files: Modules/selectmodule.c | 30 +++++++++++++++--------------- 1 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -911,6 +911,21 @@ return NULL; } +static int +devpoll_internal_close(devpollObject *self) +{ + int save_errno = 0; + if (self->fd_devpoll >= 0) { + int fd = self->fd_devpoll; + self->fd_devpoll = -1; + Py_BEGIN_ALLOW_THREADS + if (close(fd) < 0) + save_errno = errno; + Py_END_ALLOW_THREADS + } + return save_errno; +} + static PyObject* devpoll_close(devpollObject *self) { @@ -1022,21 +1037,6 @@ return self; } -static int -devpoll_internal_close(devpollObject *self) -{ - int save_errno = 0; - if (self->fd_devpoll >= 0) { - int fd = self->fd_devpoll; - self->fd_devpoll = -1; - Py_BEGIN_ALLOW_THREADS - if (close(fd) < 0) - save_errno = errno; - Py_END_ALLOW_THREADS - } - return save_errno; -} - static void devpoll_dealloc(devpollObject *self) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 15:15:38 2013 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 22 Aug 2013 15:15:38 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Fix_devpoll=5Fdealloc=28?= =?utf-8?b?KS4=?= Message-ID: <3cLR7Z6xdtz7Ljg@mail.python.org> http://hg.python.org/cpython/rev/caf72b44240b changeset: 85322:caf72b44240b user: Richard Oudkerk date: Thu Aug 22 14:03:44 2013 +0100 summary: Fix devpoll_dealloc(). files: Modules/selectmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -1040,7 +1040,7 @@ static void devpoll_dealloc(devpollObject *self) { - (void)devpoll_internal_close(); + (void)devpoll_internal_close(self); PyMem_DEL(self->fds); PyObject_Del(self); } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 16:45:47 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Thu, 22 Aug 2013 16:45:47 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE2ODA5?= =?utf-8?q?=3A_Tkinter=27s_splitlist=28=29_and_split=28=29_methods_now_acc?= =?utf-8?q?ept_Tcl=5FObj?= Message-ID: <3cLT7b12n0z7LkF@mail.python.org> http://hg.python.org/cpython/rev/6b841e1ee3b8 changeset: 85323:6b841e1ee3b8 branch: 3.3 parent: 85317:6ec43643c54f user: Serhiy Storchaka date: Thu Aug 22 17:40:31 2013 +0300 summary: Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj argument. This is needed for support Tcl/Tk 8.6. 
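To make that concrete for callers: splitlist() is normally reached through the Tcl interpreter handle, and under Tcl/Tk 8.6 the values handed back by call() are frequently Tcl_Obj instances rather than strings. A short sketch, assuming a build with this patch applied:

    from tkinter import Tcl

    interp = Tcl()      # a bare Tcl interpreter; no Tk window is needed
    # call() may return a Tcl_Obj; splitlist()/split() now accept it directly.
    value = interp.tk.call('list', 1, '2', (3.4,))
    print(interp.tk.splitlist(value))   # (1, '2', (3.4,)) with the patch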
files: Lib/test/test_tcl.py | 4 + Misc/NEWS | 3 + Modules/_tkinter.c | 65 ++++++++++++++++++++++++++----- 3 files changed, 60 insertions(+), 12 deletions(-) diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -200,6 +200,8 @@ (('a', 3.4), ('a', 3.4)), ((), ()), (call('list', 1, '2', (3.4,)), (1, '2', (3.4,))), + (call('dict', 'create', 1, '\u20ac', b'\xe2\x82\xac', (3.4,)), + (1, '\u20ac', '\u20ac', (3.4,))), ] for arg, res in testcases: self.assertEqual(splitlist(arg), res, msg=arg) @@ -232,6 +234,8 @@ (('a', (2, 3.4)), ('a', (2, 3.4))), ((), ()), (call('list', 1, '2', (3.4,)), (1, '2', (3.4,))), + (call('dict', 'create', 12, '\u20ac', b'\xe2\x82\xac', (3.4,)), + (12, '\u20ac', '\u20ac', (3.4,))), ] for arg, res in testcases: self.assertEqual(split(arg), res, msg=arg) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj + argument. + - Issue #18324: set_payload now correctly handles binary input. This also supersedes the previous fixes for #14360, #1717, and #16564. diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -1940,16 +1940,35 @@ char *list; int argc; char **argv; - PyObject *v; + PyObject *arg, *v; int i; - if (PyTuple_Size(args) == 1) { - v = PyTuple_GetItem(args, 0); - if (PyTuple_Check(v)) { - Py_INCREF(v); - return v; + if (!PyArg_ParseTuple(args, "O:splitlist", &arg)) + return NULL; + if (PyTclObject_Check(arg)) { + int objc; + Tcl_Obj **objv; + if (Tcl_ListObjGetElements(Tkapp_Interp(self), + ((PyTclObject*)arg)->value, + &objc, &objv) == TCL_ERROR) { + return Tkinter_Error(self); } + if (!(v = PyTuple_New(objc))) + return NULL; + for (i = 0; i < objc; i++) { + PyObject *s = FromObj(self, objv[i]); + if (!s || PyTuple_SetItem(v, i, s)) { + Py_DECREF(v); + return NULL; + } + } + return v; } + if (PyTuple_Check(arg)) { + Py_INCREF(arg); + return arg; + } + if (!PyArg_ParseTuple(args, "et:splitlist", "utf-8", &list)) return NULL; @@ -1980,16 +1999,38 @@ static PyObject * Tkapp_Split(PyObject *self, PyObject *args) { - PyObject *v; + PyObject *arg, *v; char *list; - if (PyTuple_Size(args) == 1) { - PyObject* o = PyTuple_GetItem(args, 0); - if (PyTuple_Check(o)) { - o = SplitObj(o); - return o; + if (!PyArg_ParseTuple(args, "O:split", &arg)) + return NULL; + if (PyTclObject_Check(arg)) { + Tcl_Obj *value = ((PyTclObject*)arg)->value; + int objc; + Tcl_Obj **objv; + int i; + if (Tcl_ListObjGetElements(Tkapp_Interp(self), value, + &objc, &objv) == TCL_ERROR) { + return FromObj(self, value); } + if (objc == 0) + return PyUnicode_FromString(""); + if (objc == 1) + return FromObj(self, objv[0]); + if (!(v = PyTuple_New(objc))) + return NULL; + for (i = 0; i < objc; i++) { + PyObject *s = FromObj(self, objv[i]); + if (!s || PyTuple_SetItem(v, i, s)) { + Py_DECREF(v); + return NULL; + } + } + return v; } + if (PyTuple_Check(arg)) + return SplitObj(arg); + if (!PyArg_ParseTuple(args, "et:split", "utf-8", &list)) return NULL; v = Split(list); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 16:45:48 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Thu, 22 Aug 2013 16:45:48 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2316809=3A_Tkinter=27s_splitlist=28=29_and_split?= =?utf-8?q?=28=29_methods_now_accept_Tcl=5FObj?= 
Message-ID: <3cLT7c49Ypz7LkF@mail.python.org> http://hg.python.org/cpython/rev/85285e6e28f4 changeset: 85324:85285e6e28f4 parent: 85322:caf72b44240b parent: 85323:6b841e1ee3b8 user: Serhiy Storchaka date: Thu Aug 22 17:42:05 2013 +0300 summary: Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj argument. This is needed for support Tcl/Tk 8.6. files: Lib/test/test_tcl.py | 4 + Misc/NEWS | 3 + Modules/_tkinter.c | 65 ++++++++++++++++++++++++++----- 3 files changed, 60 insertions(+), 12 deletions(-) diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -200,6 +200,8 @@ (('a', 3.4), ('a', 3.4)), ((), ()), (call('list', 1, '2', (3.4,)), (1, '2', (3.4,))), + (call('dict', 'create', 1, '\u20ac', b'\xe2\x82\xac', (3.4,)), + (1, '\u20ac', '\u20ac', (3.4,))), ] for arg, res in testcases: self.assertEqual(splitlist(arg), res, msg=arg) @@ -232,6 +234,8 @@ (('a', (2, 3.4)), ('a', (2, 3.4))), ((), ()), (call('list', 1, '2', (3.4,)), (1, '2', (3.4,))), + (call('dict', 'create', 12, '\u20ac', b'\xe2\x82\xac', (3.4,)), + (12, '\u20ac', '\u20ac', (3.4,))), ] for arg, res in testcases: self.assertEqual(split(arg), res, msg=arg) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -38,6 +38,9 @@ Library ------- +- Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj + argument. + - Issue #18324: set_payload now correctly handles binary input. This also supersedes the previous fixes for #14360, #1717, and #16564. diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -1757,16 +1757,35 @@ char *list; int argc; char **argv; - PyObject *v; + PyObject *arg, *v; int i; - if (PyTuple_Size(args) == 1) { - v = PyTuple_GetItem(args, 0); - if (PyTuple_Check(v)) { - Py_INCREF(v); - return v; + if (!PyArg_ParseTuple(args, "O:splitlist", &arg)) + return NULL; + if (PyTclObject_Check(arg)) { + int objc; + Tcl_Obj **objv; + if (Tcl_ListObjGetElements(Tkapp_Interp(self), + ((PyTclObject*)arg)->value, + &objc, &objv) == TCL_ERROR) { + return Tkinter_Error(self); } + if (!(v = PyTuple_New(objc))) + return NULL; + for (i = 0; i < objc; i++) { + PyObject *s = FromObj(self, objv[i]); + if (!s || PyTuple_SetItem(v, i, s)) { + Py_DECREF(v); + return NULL; + } + } + return v; } + if (PyTuple_Check(arg)) { + Py_INCREF(arg); + return arg; + } + if (!PyArg_ParseTuple(args, "et:splitlist", "utf-8", &list)) return NULL; @@ -1797,16 +1816,38 @@ static PyObject * Tkapp_Split(PyObject *self, PyObject *args) { - PyObject *v; + PyObject *arg, *v; char *list; - if (PyTuple_Size(args) == 1) { - PyObject* o = PyTuple_GetItem(args, 0); - if (PyTuple_Check(o)) { - o = SplitObj(o); - return o; + if (!PyArg_ParseTuple(args, "O:split", &arg)) + return NULL; + if (PyTclObject_Check(arg)) { + Tcl_Obj *value = ((PyTclObject*)arg)->value; + int objc; + Tcl_Obj **objv; + int i; + if (Tcl_ListObjGetElements(Tkapp_Interp(self), value, + &objc, &objv) == TCL_ERROR) { + return FromObj(self, value); } + if (objc == 0) + return PyUnicode_FromString(""); + if (objc == 1) + return FromObj(self, objv[0]); + if (!(v = PyTuple_New(objc))) + return NULL; + for (i = 0; i < objc; i++) { + PyObject *s = FromObj(self, objv[i]); + if (!s || PyTuple_SetItem(v, i, s)) { + Py_DECREF(v); + return NULL; + } + } + return v; } + if (PyTuple_Check(arg)) + return SplitObj(arg); + if (!PyArg_ParseTuple(args, "et:split", "utf-8", &list)) return NULL; v = Split(list); -- Repository URL: 
http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 16:45:50 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Thu, 22 Aug 2013 16:45:50 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE2ODA5?= =?utf-8?q?=3A_Tkinter=27s_splitlist=28=29_and_split=28=29_methods_now_acc?= =?utf-8?q?ept_Tcl=5FObj?= Message-ID: <3cLT7f0RQ0z7Lnm@mail.python.org> http://hg.python.org/cpython/rev/2d96764a66fa changeset: 85325:2d96764a66fa branch: 2.7 parent: 85319:14490ced507e user: Serhiy Storchaka date: Thu Aug 22 17:42:45 2013 +0300 summary: Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj argument. This is needed for support Tcl/Tk 8.6. files: Lib/test/test_tcl.py | 4 + Misc/NEWS | 3 + Modules/_tkinter.c | 65 ++++++++++++++++++++++++++----- 3 files changed, 60 insertions(+), 12 deletions(-) diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -209,6 +209,8 @@ (('a', 3.4), ('a', 3.4)), ((), ()), (call('list', 1, '2', (3.4,)), (1, '2', (3.4,))), + (call('dict', 'create', 1, u'\u20ac', '\xe2\x82\xac', (3.4,)), + (1, u'\u20ac', u'\u20ac', (3.4,))), ] for arg, res in testcases: self.assertEqual(splitlist(arg), res) @@ -241,6 +243,8 @@ (('a', (2, 3.4)), ('a', (2, 3.4))), ((), ()), (call('list', 1, '2', (3.4,)), (1, '2', (3.4,))), + (call('dict', 'create', 12, u'\u20ac', '\xe2\x82\xac', (3.4,)), + (12, u'\u20ac', u'\u20ac', (3.4,))), ] for arg, res in testcases: self.assertEqual(split(arg), res) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -32,6 +32,9 @@ Library ------- +- Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj + argument. + - Issue #17119: Fixed integer overflows when processing large Unicode strings and tuples in the tkinter module. 
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -2004,16 +2004,35 @@ char *list; int argc; char **argv; - PyObject *v; + PyObject *arg, *v; int i; - if (PyTuple_Size(args) == 1) { - v = PyTuple_GetItem(args, 0); - if (PyTuple_Check(v)) { - Py_INCREF(v); - return v; + if (!PyArg_ParseTuple(args, "O:splitlist", &arg)) + return NULL; + if (PyTclObject_Check(arg)) { + int objc; + Tcl_Obj **objv; + if (Tcl_ListObjGetElements(Tkapp_Interp(self), + ((PyTclObject*)arg)->value, + &objc, &objv) == TCL_ERROR) { + return Tkinter_Error(self); } + if (!(v = PyTuple_New(objc))) + return NULL; + for (i = 0; i < objc; i++) { + PyObject *s = FromObj(self, objv[i]); + if (!s || PyTuple_SetItem(v, i, s)) { + Py_DECREF(v); + return NULL; + } + } + return v; } + if (PyTuple_Check(arg)) { + Py_INCREF(arg); + return arg; + } + if (!PyArg_ParseTuple(args, "et:splitlist", "utf-8", &list)) return NULL; @@ -2044,16 +2063,38 @@ static PyObject * Tkapp_Split(PyObject *self, PyObject *args) { - PyObject *v; + PyObject *arg, *v; char *list; - if (PyTuple_Size(args) == 1) { - PyObject* o = PyTuple_GetItem(args, 0); - if (PyTuple_Check(o)) { - o = SplitObj(o); - return o; + if (!PyArg_ParseTuple(args, "O:split", &arg)) + return NULL; + if (PyTclObject_Check(arg)) { + Tcl_Obj *value = ((PyTclObject*)arg)->value; + int objc; + Tcl_Obj **objv; + int i; + if (Tcl_ListObjGetElements(Tkapp_Interp(self), value, + &objc, &objv) == TCL_ERROR) { + return FromObj(self, value); } + if (objc == 0) + return PyString_FromString(""); + if (objc == 1) + return FromObj(self, objv[0]); + if (!(v = PyTuple_New(objc))) + return NULL; + for (i = 0; i < objc; i++) { + PyObject *s = FromObj(self, objv[i]); + if (!s || PyTuple_SetItem(v, i, s)) { + Py_DECREF(v); + return NULL; + } + } + return v; } + if (PyTuple_Check(arg)) + return SplitObj(arg); + if (!PyArg_ParseTuple(args, "et:split", "utf-8", &list)) return NULL; v = Split(list); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 16:56:02 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Thu, 22 Aug 2013 16:56:02 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE2ODA5?= =?utf-8?q?=3A_Fixed_some_tkinter_incompabilities_with_Tcl/Tk_8=2E6=2E?= Message-ID: <3cLTMQ3Tx0z7Ljk@mail.python.org> http://hg.python.org/cpython/rev/516b0cd87a90 changeset: 85326:516b0cd87a90 branch: 3.3 parent: 85323:6b841e1ee3b8 user: Serhiy Storchaka date: Thu Aug 22 17:51:58 2013 +0300 summary: Issue #16809: Fixed some tkinter incompabilities with Tcl/Tk 8.6. files: Lib/tkinter/__init__.py | 8 ++++---- Lib/tkinter/test/test_ttk/test_widgets.py | 4 ++-- Misc/NEWS | 2 ++ 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -1352,7 +1352,7 @@ value = words[i+1] if not value: value = None - elif '.' in value: + elif '.' 
in str(value): value = getdouble(value) else: value = getint(value) @@ -1921,7 +1921,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict @@ -1972,7 +1972,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict @@ -2021,7 +2021,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict diff --git a/Lib/tkinter/test/test_ttk/test_widgets.py b/Lib/tkinter/test/test_ttk/test_widgets.py --- a/Lib/tkinter/test/test_ttk/test_widgets.py +++ b/Lib/tkinter/test/test_ttk/test_widgets.py @@ -105,7 +105,7 @@ cbtn['command'] = '' res = cbtn.invoke() - self.assertEqual(res, '') + self.assertEqual(str(res), '') self.assertFalse(len(success) > 1) self.assertEqual(cbtn['offvalue'], cbtn.tk.globalgetvar(cbtn['variable'])) @@ -453,7 +453,7 @@ cbtn2['command'] = '' res = cbtn2.invoke() - self.assertEqual(res, '') + self.assertEqual(str(res), '') self.assertFalse(len(success) > 1) self.assertEqual(cbtn2['value'], myvar.get()) self.assertEqual(myvar.get(), diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,8 @@ Library ------- +- Issue #16809: Fixed some tkinter incompabilities with Tcl/Tk 8.6. + - Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj argument. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 16:56:03 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Thu, 22 Aug 2013 16:56:03 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Issue_=2316809=3A_Fixed_some_tkinter_incompabilities_wit?= =?utf-8?q?h_Tcl/Tk_8=2E6=2E?= Message-ID: <3cLTMR5b4Xz7Ll0@mail.python.org> http://hg.python.org/cpython/rev/5b5ef012cd4e changeset: 85327:5b5ef012cd4e parent: 85324:85285e6e28f4 parent: 85326:516b0cd87a90 user: Serhiy Storchaka date: Thu Aug 22 17:53:06 2013 +0300 summary: Issue #16809: Fixed some tkinter incompabilities with Tcl/Tk 8.6. files: Lib/tkinter/__init__.py | 8 ++++---- Lib/tkinter/test/test_ttk/test_widgets.py | 4 ++-- Misc/NEWS | 2 ++ 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -1352,7 +1352,7 @@ value = words[i+1] if not value: value = None - elif '.' in value: + elif '.' 
in str(value): value = getdouble(value) else: value = getint(value) @@ -1921,7 +1921,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict @@ -1972,7 +1972,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict @@ -2021,7 +2021,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict diff --git a/Lib/tkinter/test/test_ttk/test_widgets.py b/Lib/tkinter/test/test_ttk/test_widgets.py --- a/Lib/tkinter/test/test_ttk/test_widgets.py +++ b/Lib/tkinter/test/test_ttk/test_widgets.py @@ -105,7 +105,7 @@ cbtn['command'] = '' res = cbtn.invoke() - self.assertEqual(res, '') + self.assertEqual(str(res), '') self.assertFalse(len(success) > 1) self.assertEqual(cbtn['offvalue'], cbtn.tk.globalgetvar(cbtn['variable'])) @@ -453,7 +453,7 @@ cbtn2['command'] = '' res = cbtn2.invoke() - self.assertEqual(res, '') + self.assertEqual(str(res), '') self.assertFalse(len(success) > 1) self.assertEqual(cbtn2['value'], myvar.get()) self.assertEqual(myvar.get(), diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -38,6 +38,8 @@ Library ------- +- Issue #16809: Fixed some tkinter incompabilities with Tcl/Tk 8.6. + - Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj argument. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 16:56:05 2013 From: python-checkins at python.org (serhiy.storchaka) Date: Thu, 22 Aug 2013 16:56:05 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE2ODA5?= =?utf-8?q?=3A_Fixed_some_tkinter_incompabilities_with_Tcl/Tk_8=2E6=2E?= Message-ID: <3cLTMT0R82z7Lkq@mail.python.org> http://hg.python.org/cpython/rev/06244031b608 changeset: 85328:06244031b608 branch: 2.7 parent: 85325:2d96764a66fa user: Serhiy Storchaka date: Thu Aug 22 17:53:16 2013 +0300 summary: Issue #16809: Fixed some tkinter incompabilities with Tcl/Tk 8.6. files: Lib/lib-tk/Tkinter.py | 8 ++++---- Lib/lib-tk/test/test_ttk/test_widgets.py | 4 ++-- Misc/NEWS | 2 ++ 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/Lib/lib-tk/Tkinter.py b/Lib/lib-tk/Tkinter.py --- a/Lib/lib-tk/Tkinter.py +++ b/Lib/lib-tk/Tkinter.py @@ -1347,7 +1347,7 @@ value = words[i+1] if not value: value = None - elif '.' in value: + elif '.' 
in str(value): value = getdouble(value) else: value = getint(value) @@ -1880,7 +1880,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict @@ -1931,7 +1931,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict @@ -1980,7 +1980,7 @@ for i in range(0, len(words), 2): key = words[i][1:] value = words[i+1] - if value[:1] == '.': + if str(value)[:1] == '.': value = self._nametowidget(value) dict[key] = value return dict diff --git a/Lib/lib-tk/test/test_ttk/test_widgets.py b/Lib/lib-tk/test/test_ttk/test_widgets.py --- a/Lib/lib-tk/test/test_ttk/test_widgets.py +++ b/Lib/lib-tk/test/test_ttk/test_widgets.py @@ -104,7 +104,7 @@ cbtn['command'] = '' res = cbtn.invoke() - self.assertEqual(res, '') + self.assertEqual(str(res), '') self.assertFalse(len(success) > 1) self.assertEqual(cbtn['offvalue'], cbtn.tk.globalgetvar(cbtn['variable'])) @@ -452,7 +452,7 @@ cbtn2['command'] = '' res = cbtn2.invoke() - self.assertEqual(res, '') + self.assertEqual(str(res), '') self.assertFalse(len(success) > 1) self.assertEqual(cbtn2['value'], myvar.get()) self.assertEqual(myvar.get(), diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -32,6 +32,8 @@ Library ------- +- Issue #16809: Fixed some tkinter incompabilities with Tcl/Tk 8.6. + - Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj argument. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Aug 22 17:20:44 2013 From: python-checkins at python.org (raymond.hettinger) Date: Thu, 22 Aug 2013 17:20:44 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Issue_18797=3A__Remove_unn?= =?utf-8?q?eeded_refcount_adjustments_for_dummy_objects=2E?= Message-ID: <3cLTvw0Lfxz7Ljp@mail.python.org> http://hg.python.org/cpython/rev/ac6dab1819c4 changeset: 85329:ac6dab1819c4 parent: 85327:5b5ef012cd4e user: Raymond Hettinger date: Thu Aug 22 08:20:31 2013 -0700 summary: Issue 18797: Remove unneeded refcount adjustments for dummy objects. It suffices to keep just one reference when the object is created. 
files: Objects/setobject.c | 22 ++++++---------------- 1 files changed, 6 insertions(+), 16 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -274,7 +274,6 @@ entry->key = key; entry->hash = hash; so->used++; - Py_DECREF(dummy); } else { /* ACTIVE */ Py_DECREF(key); @@ -381,23 +380,15 @@ so->table = newtable; so->mask = newsize - 1; memset(newtable, 0, sizeof(setentry) * newsize); + i = so->used; so->used = 0; - i = so->fill; so->fill = 0; /* Copy the data over; this is refcount-neutral for active entries; dummy entries aren't copied over, of course */ dummy_entry = dummy; for (entry = oldtable; i > 0; entry++) { - if (entry->key == NULL) { - /* UNUSED */ - ; - } else if (entry->key == dummy_entry) { - /* DUMMY */ - --i; - assert(entry->key == dummy); - Py_DECREF(entry->key); - } else { + if (entry->key != NULL && entry->key != dummy_entry) { /* ACTIVE */ --i; set_insert_clean(so, entry->key, entry->hash); @@ -468,7 +459,6 @@ if (entry->key == NULL || entry->key == dummy) return DISCARD_NOTFOUND; old_key = entry->key; - Py_INCREF(dummy); entry->key = dummy; so->used--; Py_DECREF(old_key); @@ -496,7 +486,6 @@ if (entry->key == NULL || entry->key == dummy) return DISCARD_NOTFOUND; old_key = entry->key; - Py_INCREF(dummy); entry->key = dummy; so->used--; Py_DECREF(old_key); @@ -554,7 +543,8 @@ #endif if (entry->key) { --fill; - Py_DECREF(entry->key); + if (entry->key != dummy) + Py_DECREF(entry->key); } #ifdef Py_DEBUG else @@ -615,7 +605,8 @@ for (entry = so->table; fill > 0; entry++) { if (entry->key) { --fill; - Py_DECREF(entry->key); + if (entry->key != dummy) + Py_DECREF(entry->key); } } if (so->table != so->smalltable) @@ -788,7 +779,6 @@ } } key = entry->key; - Py_INCREF(dummy); entry->key = dummy; so->used--; so->table[0].hash = i + 1; /* next place to start */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 00:39:34 2013 From: python-checkins at python.org (richard.oudkerk) Date: Fri, 23 Aug 2013 00:39:34 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Make_test_for_semaphore_tr?= =?utf-8?q?acker_capture_and_check_the_debug_output=2E?= Message-ID: <3cLgfG3Fbhz7Ljg@mail.python.org> http://hg.python.org/cpython/rev/d51486f8ac08 changeset: 85330:d51486f8ac08 user: Richard Oudkerk date: Thu Aug 22 23:37:23 2013 +0100 summary: Make test for semaphore tracker capture and check the debug output. files: Lib/test/_test_multiprocessing.py | 14 +++++++------- 1 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -3580,16 +3580,11 @@ os.write(%d, lock2._semlock.name.encode("ascii") + b"\\n") time.sleep(10) ''' - print("\nTestSemaphoreTracker will output warnings a bit like:\n" - " ... There appear to be 2 leaked semaphores" - " to clean up at shutdown\n" - " ... 
'/mp-03jgqz': [Errno 2] No such file or directory", - file=sys.stderr) r, w = os.pipe() p = subprocess.Popen([sys.executable, - #'-W', 'ignore:semaphore_tracker', '-c', cmd % (w, w)], - pass_fds=[w]) + pass_fds=[w], + stderr=subprocess.PIPE) os.close(w) with open(r, 'rb', closefd=True) as f: name1 = f.readline().rstrip().decode('ascii') @@ -3602,6 +3597,11 @@ _multiprocessing.sem_unlink(name2) # docs say it should be ENOENT, but OSX seems to give EINVAL self.assertIn(ctx.exception.errno, (errno.ENOENT, errno.EINVAL)) + err = p.stderr.read().decode('utf-8') + p.stderr.close() + expected = 'semaphore_tracker: There appear to be 2 leaked semaphores' + self.assertRegex(err, expected) + self.assertRegex(err, 'semaphore_tracker: %r: \[Errno' % name1) # # Mixins -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 01:43:32 2013 From: python-checkins at python.org (tim.peters) Date: Fri, 23 Aug 2013 01:43:32 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=282=2E7=29=3A_Add_line_expla?= =?utf-8?q?ining_the_=22=25sort=22_test=2E?= Message-ID: <3cLj444Dkmz7Ljk@mail.python.org> http://hg.python.org/cpython/rev/1ea833ecaf5a changeset: 85331:1ea833ecaf5a branch: 2.7 parent: 85328:06244031b608 user: Tim Peters date: Thu Aug 22 18:32:53 2013 -0500 summary: Add line explaining the "%sort" test. files: Objects/listsort.txt | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Objects/listsort.txt b/Objects/listsort.txt --- a/Objects/listsort.txt +++ b/Objects/listsort.txt @@ -60,6 +60,7 @@ /sort: ascending data 3sort: ascending, then 3 random exchanges +sort: ascending, then 10 random at the end + %sort: ascending, then randomly replace 1% of elements w/ random values ~sort: many duplicates =sort: all equal !sort: worst case scenario -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 01:43:33 2013 From: python-checkins at python.org (tim.peters) Date: Fri, 23 Aug 2013 01:43:33 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Add_line_expla?= =?utf-8?q?ining_the_=22=25sort=22_test=2E?= Message-ID: <3cLj4563tjz7Ljk@mail.python.org> http://hg.python.org/cpython/rev/8927d15c0785 changeset: 85332:8927d15c0785 branch: 3.3 parent: 85326:516b0cd87a90 user: Tim Peters date: Thu Aug 22 18:32:53 2013 -0500 summary: Add line explaining the "%sort" test. (grafted from 1ea833ecaf5a9d43a886e9e73b4e2551d0d5b548) files: Objects/listsort.txt | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Objects/listsort.txt b/Objects/listsort.txt --- a/Objects/listsort.txt +++ b/Objects/listsort.txt @@ -60,6 +60,7 @@ /sort: ascending data 3sort: ascending, then 3 random exchanges +sort: ascending, then 10 random at the end + %sort: ascending, then randomly replace 1% of elements w/ random values ~sort: many duplicates =sort: all equal !sort: worst case scenario -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 01:43:35 2013 From: python-checkins at python.org (tim.peters) Date: Fri, 23 Aug 2013 01:43:35 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_Add_line_explaining_the_=22=25sort=22_test=2E?= Message-ID: <3cLj470qMpz7LnD@mail.python.org> http://hg.python.org/cpython/rev/41d459f96704 changeset: 85333:41d459f96704 parent: 85330:d51486f8ac08 parent: 85332:8927d15c0785 user: Tim Peters date: Thu Aug 22 18:42:02 2013 -0500 summary: Add line explaining the "%sort" test. 
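The "%sort" line added above documents one of the listsort benchmark inputs: ascending data with roughly 1% of the elements replaced by random values. A hypothetical sketch of such an input follows; the size and helper name are illustrative and not taken from the benchmark driver:

    # Hypothetical "%sort"-style input: sorted data, then ~1% random replacements.
    import random

    def percent_sort_data(n=2 ** 15):
        data = list(range(n))
        for _ in range(max(1, n // 100)):        # disturb roughly 1% of the slots
            data[random.randrange(n)] = random.uniform(0, n)
        return data

    nearly_sorted = percent_sort_data()
    nearly_sorted.sort()                         # the case the listsort.txt table times
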
files: Objects/listsort.txt | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Objects/listsort.txt b/Objects/listsort.txt --- a/Objects/listsort.txt +++ b/Objects/listsort.txt @@ -60,6 +60,7 @@ /sort: ascending data 3sort: ascending, then 3 random exchanges +sort: ascending, then 10 random at the end + %sort: ascending, then randomly replace 1% of elements w/ random values ~sort: many duplicates =sort: all equal !sort: worst case scenario -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Fri Aug 23 06:09:55 2013 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 23 Aug 2013 06:09:55 +0200 Subject: [Python-checkins] Daily reference leaks (41d459f96704): sum=0 Message-ID: results for 41d459f96704 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogqE4MTD', '-x'] From python-checkins at python.org Fri Aug 23 10:22:27 2013 From: python-checkins at python.org (raymond.hettinger) Date: Fri, 23 Aug 2013 10:22:27 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Add_the_same_dummy_type_th?= =?utf-8?q?at_is_used_in_dictionaries=2E?= Message-ID: <3cLwZq2Jrqz7Lr6@mail.python.org> http://hg.python.org/cpython/rev/f0202c3daa7a changeset: 85334:f0202c3daa7a user: Raymond Hettinger date: Fri Aug 23 03:22:15 2013 -0500 summary: Add the same dummy type that is used in dictionaries. files: Objects/setobject.c | 64 +++++++++++++++++++++++++------- 1 files changed, 49 insertions(+), 15 deletions(-) diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -29,7 +29,10 @@ #define PERTURB_SHIFT 5 /* Object used as dummy key to fill deleted entries */ -static PyObject *dummy = NULL; /* Initialized by first call to make_new_set() */ + +static PyObject _dummy_struct; + +#define dummy (&_dummy_struct) #ifdef Py_REF_DEBUG PyObject * @@ -329,7 +332,6 @@ Py_ssize_t i; int is_oldtable_malloced; setentry small_copy[PySet_MINSIZE]; - PyObject *dummy_entry; assert(minused >= 0); @@ -386,10 +388,8 @@ /* Copy the data over; this is refcount-neutral for active entries; dummy entries aren't copied over, of course */ - dummy_entry = dummy; for (entry = oldtable; i > 0; entry++) { - if (entry->key != NULL && entry->key != dummy_entry) { - /* ACTIVE */ + if (entry->key != NULL && entry->key != dummy) { --i; set_insert_clean(so, entry->key, entry->hash); } @@ -674,7 +674,6 @@ { PySetObject *other; PyObject *key; - PyObject *dummy_entry; Py_hash_t hash; Py_ssize_t i; setentry *entry; @@ -694,13 +693,12 @@ if (set_table_resize(so, (so->used + other->used)*2) != 0) return -1; } - dummy_entry = dummy; for (i = 0; i <= other->mask; i++) { entry = &other->table[i]; key = entry->key; hash = entry->hash; if (key != NULL && - key != dummy_entry) { + key != dummy) { Py_INCREF(key); if (set_insert_key(so, key, hash) == -1) { Py_DECREF(key); @@ -1070,12 +1068,6 @@ { PySetObject *so = NULL; - if (dummy == NULL) { /* Auto-initialize dummy */ - dummy = PyUnicode_FromString(""); - if (dummy == NULL) - return NULL; - } - /* create PySetObject structure */ if (numfree && (type == &PySet_Type || type == &PyFrozenSet_Type)) { @@ -1172,7 +1164,6 @@ PySet_Fini(void) { PySet_ClearFreeList(); - Py_CLEAR(dummy); Py_CLEAR(emptyfrozenset); } @@ -2581,3 +2572,46 @@ #undef assertRaises #endif + +/***** Dummy Struct *************************************************/ + +static PyObject * +dummy_repr(PyObject *op) +{ + 
return PyUnicode_FromString(""); +} + +static void +dummy_dealloc(PyObject* ignore) +{ + Py_FatalError("deallocating "); +} + +static PyTypeObject _PySetDummy_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + " type", + 0, + 0, + dummy_dealloc, /*tp_dealloc*/ /*never called*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_reserved*/ + dummy_repr, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash */ + 0, /*tp_call */ + 0, /*tp_str */ + 0, /*tp_getattro */ + 0, /*tp_setattro */ + 0, /*tp_as_buffer */ + Py_TPFLAGS_DEFAULT, /*tp_flags */ +}; + +static PyObject _dummy_struct = { + _PyObject_EXTRA_INIT + 2, &_PySetDummy_Type +}; + -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 17:00:12 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 23 Aug 2013 17:00:12 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_Emphasize_that?= =?utf-8?q?_people_should_not_override_=5F=5Fimport=5F=5F=2E?= Message-ID: <3cM5Pm0zDmz7Lkh@mail.python.org> http://hg.python.org/cpython/rev/7506a072ebdb changeset: 85335:7506a072ebdb branch: 3.3 parent: 85332:8927d15c0785 user: Brett Cannon date: Fri Aug 23 10:58:49 2013 -0400 summary: Emphasize that people should not override __import__. With importlib getting used more and more, changing __import__ will not work as well as people used to hope as it will potentially bypass importers, etc. It also will not work with importlib.import_module() as it uses "importlib.__import__" (i.e. importlib's implementation of import) directly and not builtins.__import__. files: Doc/library/functions.rst | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1451,10 +1451,11 @@ This function is invoked by the :keyword:`import` statement. It can be replaced (by importing the :mod:`builtins` module and assigning to ``builtins.__import__``) in order to change semantics of the - :keyword:`import` statement, but nowadays it is usually simpler to use import - hooks (see :pep:`302`) to attain the same goals. Direct use of - :func:`__import__` is entirely discouraged in favor of - :func:`importlib.import_module`. + :keyword:`import` statement, but doing so is **strongly** discouraged as it + is usually simpler to use import hooks (see :pep:`302`) to attain the same + goals and does not cause issues with code which assumes the default import + implementation is in use. Direct use of :func:`__import__` is also + discouraged in favor of :func:`importlib.import_module`. The function imports the module *name*, potentially using the given *globals* and *locals* to determine how to interpret the name in a package context. 
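The reworded paragraph above steers readers away from replacing builtins.__import__; a small sketch of the recommended alternative for dynamic imports is shown below (the helper name and the module imported are purely illustrative):

    # Prefer importlib.import_module() for dynamic imports instead of calling
    # or overriding __import__ directly.
    import importlib

    def load_by_name(dotted_name):
        # dotted_name is a hypothetical argument, e.g. "json" or "email.message"
        return importlib.import_module(dotted_name)

    mod = load_by_name("json")
    print(mod.dumps({"ok": True}))               # proves the module was imported
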
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 17:00:13 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 23 Aug 2013 17:00:13 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_merge_emphasized_discouragement_of_overriding_=5F=5Fimpo?= =?utf-8?q?rt=5F=5F?= Message-ID: <3cM5Pn61fDz7Lq0@mail.python.org> http://hg.python.org/cpython/rev/391f36ef461a changeset: 85336:391f36ef461a parent: 85334:f0202c3daa7a parent: 85335:7506a072ebdb user: Brett Cannon date: Fri Aug 23 11:00:04 2013 -0400 summary: merge emphasized discouragement of overriding __import__ files: Doc/library/functions.rst | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1467,10 +1467,11 @@ This function is invoked by the :keyword:`import` statement. It can be replaced (by importing the :mod:`builtins` module and assigning to ``builtins.__import__``) in order to change semantics of the - :keyword:`import` statement, but nowadays it is usually simpler to use import - hooks (see :pep:`302`) to attain the same goals. Direct use of - :func:`__import__` is entirely discouraged in favor of - :func:`importlib.import_module`. + :keyword:`import` statement, but doing so is **strongly** discouraged as it + is usually simpler to use import hooks (see :pep:`302`) to attain the same + goals and does not cause issues with code which assumes the default import + implementation is in use. Direct use of :func:`__import__` is also + discouraged in favor of :func:`importlib.import_module`. The function imports the module *name*, potentially using the given *globals* and *locals* to determine how to interpret the name in a package context. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 17:52:54 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 23 Aug 2013 17:52:54 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogSXNzdWUgIzE4NzU1?= =?utf-8?q?=3A_Allow_imp=2Eload=5F*=28=29_loaders_to_have_get=5Fdata=28=29?= =?utf-8?q?_called?= Message-ID: <3cM6ZZ00smz7LqT@mail.python.org> http://hg.python.org/cpython/rev/ddd610cb65ef changeset: 85337:ddd610cb65ef branch: 3.3 parent: 85335:7506a072ebdb user: Brett Cannon date: Fri Aug 23 11:45:57 2013 -0400 summary: Issue #18755: Allow imp.load_*() loaders to have get_data() called multiple times. files: Lib/imp.py | 9 +++++++-- Lib/test/test_imp.py | 7 +++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/Lib/imp.py b/Lib/imp.py --- a/Lib/imp.py +++ b/Lib/imp.py @@ -90,13 +90,18 @@ def get_data(self, path): """Gross hack to contort loader to deal w/ load_*()'s bad API.""" if self.file and path == self.path: - with self.file: + if not self.file.closed: + file = self.file + else: + self.file = file = open(self.path, 'r') + + with file: # Technically should be returning bytes, but # SourceLoader.get_code() just passed what is returned to # compile() which can handle str. And converting to bytes would # require figuring out the encoding to decode to and # tokenize.detect_encoding() only accepts bytes. 
- return self.file.read() + return file.read() else: return super().get_data(path) diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -248,6 +248,13 @@ return imp.load_module(name, None, *found[1:]) + def test_multiple_calls_to_get_data(self): + # Issue #18755: make sure multiple calls to get_data() can succeed. + loader = imp._LoadSourceCompatibility('imp', imp.__file__, + open(imp.__file__)) + loader.get_data(imp.__file__) # File should be closed + loader.get_data(imp.__file__) # Will need to create a newly opened file + class ReloadTests(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 17:52:55 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 23 Aug 2013 17:52:55 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=283=2E3=29=3A_NEW_entry_for_?= =?utf-8?q?issue_=2318755?= Message-ID: <3cM6Zb1s28z7LsB@mail.python.org> http://hg.python.org/cpython/rev/b107f7a8730d changeset: 85338:b107f7a8730d branch: 3.3 user: Brett Cannon date: Fri Aug 23 11:47:26 2013 -0400 summary: NEW entry for issue #18755 files: Misc/NEWS | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #18755: Fixed the loader used in imp to allow get_data() to be called + multiple times. + - Issue #16809: Fixed some tkinter incompabilities with Tcl/Tk 8.6. - Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 17:52:57 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 23 Aug 2013 17:52:57 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_merge_for_issue_=2318755?= Message-ID: <3cM6Zd15k2z7LqT@mail.python.org> http://hg.python.org/cpython/rev/7d30ecf5c916 changeset: 85339:7d30ecf5c916 parent: 85336:391f36ef461a parent: 85337:ddd610cb65ef user: Brett Cannon date: Fri Aug 23 11:52:19 2013 -0400 summary: merge for issue #18755 files: Lib/imp.py | 9 +++++++-- Lib/test/test_imp.py | 9 +++++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/Lib/imp.py b/Lib/imp.py --- a/Lib/imp.py +++ b/Lib/imp.py @@ -140,13 +140,18 @@ def get_data(self, path): """Gross hack to contort loader to deal w/ load_*()'s bad API.""" if self.file and path == self.path: - with self.file: + if not self.file.closed: + file = self.file + else: + self.file = file = open(self.path, 'r') + + with file: # Technically should be returning bytes, but # SourceLoader.get_code() just passed what is returned to # compile() which can handle str. And converting to bytes would # require figuring out the encoding to decode to and # tokenize.detect_encoding() only accepts bytes. - return self.file.read() + return file.read() else: return super().get_data(path) diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -275,6 +275,7 @@ return imp.load_module(name, None, *found[1:]) +<<<<<<< local @unittest.skipIf(sys.dont_write_bytecode, "test meaningful only when writing bytecode") def test_bug7732(self): @@ -283,6 +284,14 @@ os.mkdir(source) self.assertRaisesRegex(ImportError, '^No module', imp.find_module, support.TESTFN, ["."]) +======= + def test_multiple_calls_to_get_data(self): + # Issue #18755: make sure multiple calls to get_data() can succeed. 
+ loader = imp._LoadSourceCompatibility('imp', imp.__file__, + open(imp.__file__)) + loader.get_data(imp.__file__) # File should be closed + loader.get_data(imp.__file__) # Will need to create a newly opened file +>>>>>>> other class ReloadTests(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 17:52:58 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 23 Aug 2013 17:52:58 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?q?=29=3A_merge_for_issue_=2318755?= Message-ID: <3cM6Zf2z67z7Lrv@mail.python.org> http://hg.python.org/cpython/rev/174bfa137472 changeset: 85340:174bfa137472 parent: 85339:7d30ecf5c916 parent: 85338:b107f7a8730d user: Brett Cannon date: Fri Aug 23 11:52:44 2013 -0400 summary: merge for issue #18755 files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 18:10:16 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 23 Aug 2013 18:10:16 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython=3A_Delete_merge_markers?= Message-ID: <3cM6yc5zVZz7Ljb@mail.python.org> http://hg.python.org/cpython/rev/761174766482 changeset: 85341:761174766482 user: Brett Cannon date: Fri Aug 23 12:10:09 2013 -0400 summary: Delete merge markers files: Lib/test/test_imp.py | 4 +--- 1 files changed, 1 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -275,7 +275,6 @@ return imp.load_module(name, None, *found[1:]) -<<<<<<< local @unittest.skipIf(sys.dont_write_bytecode, "test meaningful only when writing bytecode") def test_bug7732(self): @@ -284,14 +283,13 @@ os.mkdir(source) self.assertRaisesRegex(ImportError, '^No module', imp.find_module, support.TESTFN, ["."]) -======= + def test_multiple_calls_to_get_data(self): # Issue #18755: make sure multiple calls to get_data() can succeed. loader = imp._LoadSourceCompatibility('imp', imp.__file__, open(imp.__file__)) loader.get_data(imp.__file__) # File should be closed loader.get_data(imp.__file__) # Will need to create a newly opened file ->>>>>>> other class ReloadTests(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 19:24:49 2013 From: python-checkins at python.org (victor.stinner) Date: Fri, 23 Aug 2013 19:24:49 +0200 (CEST) Subject: [Python-checkins] =?utf-8?b?Y3B5dGhvbiAoMy4zKTogQ2xvc2UgIzE3NzAy?= =?utf-8?q?=3A_On_error=2C_os=2Eenvironb_now_removes_suppress_the_except_c?= =?utf-8?q?ontext?= Message-ID: <3cM8cd4nLXz7Lkh@mail.python.org> http://hg.python.org/cpython/rev/26c049dc1a4a changeset: 85342:26c049dc1a4a branch: 3.3 parent: 85338:b107f7a8730d user: Victor Stinner date: Fri Aug 23 19:19:15 2013 +0200 summary: Close #17702: On error, os.environb now removes suppress the except context when raising a new KeyError with the original key. 
files: Lib/os.py | 4 ++-- Lib/test/test_os.py | 3 +++ Misc/NEWS | 3 +++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -673,7 +673,7 @@ value = self._data[self.encodekey(key)] except KeyError: # raise KeyError with the original key value - raise KeyError(key) + raise KeyError(key) from None return self.decodevalue(value) def __setitem__(self, key, value): @@ -689,7 +689,7 @@ del self._data[encodedkey] except KeyError: # raise KeyError with the original key value - raise KeyError(key) + raise KeyError(key) from None def __iter__(self): for key in self._data: diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -644,10 +644,13 @@ with self.assertRaises(KeyError) as cm: os.environ[missing] self.assertIs(cm.exception.args[0], missing) + self.assertTrue(cm.exception.__suppress_context__) with self.assertRaises(KeyError) as cm: del os.environ[missing] self.assertIs(cm.exception.args[0], missing) + self.assertTrue(cm.exception.__suppress_context__) + class WalkTests(unittest.TestCase): """Tests for os.walk().""" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -66,6 +66,9 @@ Library ------- +- Issue #17702: On error, os.environb now removes suppress the except context + when raising a new KeyError with the original key. + - Issue #18755: Fixed the loader used in imp to allow get_data() to be called multiple times. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 19:24:50 2013 From: python-checkins at python.org (victor.stinner) Date: Fri, 23 Aug 2013 19:24:50 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?cpython_=28merge_3=2E3_-=3E_default?= =?utf-8?b?KTogKE1lcmdlIDMuMykgQ2xvc2UgIzE3NzAyOiBPbiBlcnJvciwgb3MuZW52?= =?utf-8?q?ironb_now_removes_suppress_the_except?= Message-ID: <3cM8cf71zWz7Ls3@mail.python.org> http://hg.python.org/cpython/rev/01f33959ddf6 changeset: 85343:01f33959ddf6 parent: 85341:761174766482 parent: 85342:26c049dc1a4a user: Victor Stinner date: Fri Aug 23 19:23:42 2013 +0200 summary: (Merge 3.3) Close #17702: On error, os.environb now removes suppress the except context when raising a new KeyError with the original key. 
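The os.py hunks above rely on the "raise ... from None" idiom from PEP 409: the new KeyError carries the caller's original key, and its __suppress_context__ flag hides the internal lookup failure from the traceback. A minimal stand-alone sketch of the same idiom (the mapping class here is illustrative, not os._Environ itself):

    # Minimal sketch: re-raise KeyError with the original key and suppress
    # the chained exception context.
    class Env:
        def __init__(self):
            self._data = {}

        def __getitem__(self, key):
            try:
                return self._data[key.upper()]       # stand-in for encodekey()
            except KeyError:
                raise KeyError(key) from None        # sets __suppress_context__

    try:
        Env()["missing"]
    except KeyError as exc:
        print(exc.args[0], exc.__suppress_context__)  # missing True
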
files: Lib/os.py | 4 ++-- Lib/test/test_os.py | 3 +++ Misc/NEWS | 3 +++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -648,7 +648,7 @@ value = self._data[self.encodekey(key)] except KeyError: # raise KeyError with the original key value - raise KeyError(key) + raise KeyError(key) from None return self.decodevalue(value) def __setitem__(self, key, value): @@ -664,7 +664,7 @@ del self._data[encodedkey] except KeyError: # raise KeyError with the original key value - raise KeyError(key) + raise KeyError(key) from None def __iter__(self): for key in self._data: diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -646,10 +646,13 @@ with self.assertRaises(KeyError) as cm: os.environ[missing] self.assertIs(cm.exception.args[0], missing) + self.assertTrue(cm.exception.__suppress_context__) with self.assertRaises(KeyError) as cm: del os.environ[missing] self.assertIs(cm.exception.args[0], missing) + self.assertTrue(cm.exception.__suppress_context__) + class WalkTests(unittest.TestCase): """Tests for os.walk().""" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -38,6 +38,9 @@ Library ------- +- Issue #17702: On error, os.environb now removes suppress the except context + when raising a new KeyError with the original key. + - Issue #16809: Fixed some tkinter incompabilities with Tcl/Tk 8.6. - Issue #16809: Tkinter's splitlist() and split() methods now accept Tcl_Obj -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Aug 23 19:31:35 2013 From: python-checkins at python.org (brett.cannon) Date: Fri, 23 Aug 2013 19:31:35 +0200 (CEST) Subject: [Python-checkins] =?utf-8?q?benchmarks=3A_Issue_=2318586=3A_Remov?= =?utf-8?q?e_the_need_for_make=5Fperf3=2Esh?= Message-ID: <3cM8mR0Gz5z7Lsl@mail.python.org> http://hg.python.org/benchmarks/rev/b868d0a9c5d7 changeset: 205:b868d0a9c5d7 user: Brett Cannon date: Fri Aug 23 13:30:30 2013 -0400 summary: Issue #18586: Remove the need for make_perf3.sh files: lib3/2to3/2to3 | 5 + lib3/2to3/HACKING | 49 + lib3/2to3/README | 255 + lib3/2to3/example.py | 405 + lib3/2to3/lib2to3/Grammar.txt | 158 + lib3/2to3/lib2to3/Grammar3.3.2.final.0.pickle | Bin lib3/2to3/lib2to3/Grammar3.4.0.alpha.1.pickle | Bin lib3/2to3/lib2to3/PatternGrammar.txt | 28 + lib3/2to3/lib2to3/PatternGrammar3.3.2.final.0.pickle | Bin lib3/2to3/lib2to3/PatternGrammar3.4.0.alpha.1.pickle | Bin lib3/2to3/lib2to3/__init__.py | 1 + lib3/2to3/lib2to3/btm_matcher.py | 168 + lib3/2to3/lib2to3/btm_utils.py | 283 + lib3/2to3/lib2to3/fixer_base.py | 189 + lib3/2to3/lib2to3/fixer_util.py | 432 + lib3/2to3/lib2to3/fixes/__init__.py | 1 + lib3/2to3/lib2to3/fixes/fix_apply.py | 59 + lib3/2to3/lib2to3/fixes/fix_basestring.py | 14 + lib3/2to3/lib2to3/fixes/fix_buffer.py | 22 + lib3/2to3/lib2to3/fixes/fix_callable.py | 37 + lib3/2to3/lib2to3/fixes/fix_dict.py | 107 + lib3/2to3/lib2to3/fixes/fix_except.py | 93 + lib3/2to3/lib2to3/fixes/fix_exec.py | 40 + lib3/2to3/lib2to3/fixes/fix_execfile.py | 52 + lib3/2to3/lib2to3/fixes/fix_exitfunc.py | 72 + lib3/2to3/lib2to3/fixes/fix_filter.py | 76 + lib3/2to3/lib2to3/fixes/fix_funcattrs.py | 21 + lib3/2to3/lib2to3/fixes/fix_future.py | 22 + lib3/2to3/lib2to3/fixes/fix_getcwdu.py | 19 + lib3/2to3/lib2to3/fixes/fix_has_key.py | 110 + lib3/2to3/lib2to3/fixes/fix_idioms.py | 152 + lib3/2to3/lib2to3/fixes/fix_import.py | 99 + lib3/2to3/lib2to3/fixes/fix_imports.py | 145 + 
lib3/2to3/lib2to3/fixes/fix_imports2.py | 16 + lib3/2to3/lib2to3/fixes/fix_input.py | 26 + lib3/2to3/lib2to3/fixes/fix_intern.py | 46 + lib3/2to3/lib2to3/fixes/fix_isinstance.py | 52 + lib3/2to3/lib2to3/fixes/fix_itertools.py | 42 + lib3/2to3/lib2to3/fixes/fix_itertools_imports.py | 56 + lib3/2to3/lib2to3/fixes/fix_long.py | 19 + lib3/2to3/lib2to3/fixes/fix_map.py | 91 + lib3/2to3/lib2to3/fixes/fix_metaclass.py | 228 + lib3/2to3/lib2to3/fixes/fix_methodattrs.py | 24 + lib3/2to3/lib2to3/fixes/fix_ne.py | 23 + lib3/2to3/lib2to3/fixes/fix_next.py | 103 + lib3/2to3/lib2to3/fixes/fix_nonzero.py | 21 + lib3/2to3/lib2to3/fixes/fix_numliterals.py | 28 + lib3/2to3/lib2to3/fixes/fix_operator.py | 99 + lib3/2to3/lib2to3/fixes/fix_paren.py | 44 + lib3/2to3/lib2to3/fixes/fix_print.py | 87 + lib3/2to3/lib2to3/fixes/fix_raise.py | 90 + lib3/2to3/lib2to3/fixes/fix_raw_input.py | 17 + lib3/2to3/lib2to3/fixes/fix_reduce.py | 35 + lib3/2to3/lib2to3/fixes/fix_renames.py | 70 + lib3/2to3/lib2to3/fixes/fix_repr.py | 23 + lib3/2to3/lib2to3/fixes/fix_set_literal.py | 53 + lib3/2to3/lib2to3/fixes/fix_standarderror.py | 18 + lib3/2to3/lib2to3/fixes/fix_sys_exc.py | 30 + lib3/2to3/lib2to3/fixes/fix_throw.py | 56 + lib3/2to3/lib2to3/fixes/fix_tuple_params.py | 175 + lib3/2to3/lib2to3/fixes/fix_types.py | 62 + lib3/2to3/lib2to3/fixes/fix_unicode.py | 25 + lib3/2to3/lib2to3/fixes/fix_urllib.py | 197 + lib3/2to3/lib2to3/fixes/fix_ws_comma.py | 39 + lib3/2to3/lib2to3/fixes/fix_xrange.py | 73 + lib3/2to3/lib2to3/fixes/fix_xreadlines.py | 25 + lib3/2to3/lib2to3/fixes/fix_zip.py | 35 + lib3/2to3/lib2to3/main.py | 182 + lib3/2to3/lib2to3/patcomp.py | 204 + lib3/2to3/lib2to3/pgen2/__init__.py | 4 + lib3/2to3/lib2to3/pgen2/conv.py | 257 + lib3/2to3/lib2to3/pgen2/driver.py | 147 + lib3/2to3/lib2to3/pgen2/grammar.py | 184 + lib3/2to3/lib2to3/pgen2/literals.py | 60 + lib3/2to3/lib2to3/pgen2/parse.py | 201 + lib3/2to3/lib2to3/pgen2/pgen.py | 386 + lib3/2to3/lib2to3/pgen2/token.py | 82 + lib3/2to3/lib2to3/pgen2/tokenize.py | 500 + lib3/2to3/lib2to3/pygram.py | 40 + lib3/2to3/lib2to3/pytree.py | 884 + lib3/2to3/lib2to3/refactor.py | 741 + lib3/2to3/lib2to3/tests/__init__.py | 24 + lib3/2to3/lib2to3/tests/data/README | 6 + lib3/2to3/lib2to3/tests/data/bom.py | 2 + lib3/2to3/lib2to3/tests/data/crlf.py | 3 + lib3/2to3/lib2to3/tests/data/different_encoding.py | 6 + lib3/2to3/lib2to3/tests/data/fixers/bad_order.py | 5 + lib3/2to3/lib2to3/tests/data/fixers/myfixes/__init__.py | 0 lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_explicit.py | 6 + lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_first.py | 6 + lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_last.py | 7 + lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_parrot.py | 13 + lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_preorder.py | 6 + lib3/2to3/lib2to3/tests/data/fixers/no_fixer_cls.py | 1 + lib3/2to3/lib2to3/tests/data/fixers/parrot_example.py | 2 + lib3/2to3/lib2to3/tests/data/infinite_recursion.py | 2669 +++++ lib3/2to3/lib2to3/tests/data/py2_test_grammar.py | 974 ++ lib3/2to3/lib2to3/tests/data/py3_test_grammar.py | 923 ++ lib3/2to3/lib2to3/tests/pytree_idempotency.py | 92 + lib3/2to3/lib2to3/tests/support.py | 54 + lib3/2to3/lib2to3/tests/test_all_fixers.py | 23 + lib3/2to3/lib2to3/tests/test_fixers.py | 4515 ++++++++++ lib3/2to3/lib2to3/tests/test_main.py | 41 + lib3/2to3/lib2to3/tests/test_parser.py | 217 + lib3/2to3/lib2to3/tests/test_pytree.py | 494 + lib3/2to3/lib2to3/tests/test_refactor.py | 281 + lib3/2to3/lib2to3/tests/test_util.py | 594 + 
lib3/2to3/scripts/benchmark.py | 58 + lib3/2to3/scripts/find_pattern.py | 97 + lib3/2to3/test.py | 41 + lib3/Chameleon-2.9.2/.gitignore | 12 + lib3/Chameleon-2.9.2/CHANGES.rst | 1075 ++ lib3/Chameleon-2.9.2/COPYRIGHT.txt | 7 + lib3/Chameleon-2.9.2/LICENSE.txt | 185 + lib3/Chameleon-2.9.2/Makefile | 89 + lib3/Chameleon-2.9.2/PKG-INFO | 1122 ++ lib3/Chameleon-2.9.2/README.rst | 25 + lib3/Chameleon-2.9.2/benchmarks/bm_chameleon.py | 128 + lib3/Chameleon-2.9.2/benchmarks/bm_mako.py | 153 + lib3/Chameleon-2.9.2/benchmarks/util.py | 51 + lib3/Chameleon-2.9.2/distribute_setup.py | 485 + lib3/Chameleon-2.9.2/docs/conf.py | 194 + lib3/Chameleon-2.9.2/docs/configuration.rst | 43 + lib3/Chameleon-2.9.2/docs/index.rst | 217 + lib3/Chameleon-2.9.2/docs/integration.rst | 41 + lib3/Chameleon-2.9.2/docs/library.rst | 238 + lib3/Chameleon-2.9.2/docs/reference.rst | 1695 +++ lib3/Chameleon-2.9.2/setup.cfg | 14 + lib3/Chameleon-2.9.2/setup.py | 90 + lib3/Chameleon-2.9.2/src/Chameleon.egg-info/PKG-INFO | 1122 ++ lib3/Chameleon-2.9.2/src/Chameleon.egg-info/SOURCES.txt | 380 + lib3/Chameleon-2.9.2/src/Chameleon.egg-info/dependency_links.txt | 1 + lib3/Chameleon-2.9.2/src/Chameleon.egg-info/not-zip-safe | 1 + lib3/Chameleon-2.9.2/src/Chameleon.egg-info/top_level.txt | 1 + lib3/Chameleon-2.9.2/src/chameleon/__init__.py | 5 + lib3/Chameleon-2.9.2/src/chameleon/ast24.py | 135 + lib3/Chameleon-2.9.2/src/chameleon/astutil.py | 926 ++ lib3/Chameleon-2.9.2/src/chameleon/benchmark.py | 478 + lib3/Chameleon-2.9.2/src/chameleon/codegen.py | 221 + lib3/Chameleon-2.9.2/src/chameleon/compiler.py | 1553 +++ lib3/Chameleon-2.9.2/src/chameleon/config.py | 47 + lib3/Chameleon-2.9.2/src/chameleon/exc.py | 289 + lib3/Chameleon-2.9.2/src/chameleon/i18n.py | 120 + lib3/Chameleon-2.9.2/src/chameleon/interfaces.py | 102 + lib3/Chameleon-2.9.2/src/chameleon/loader.py | 174 + lib3/Chameleon-2.9.2/src/chameleon/metal.py | 23 + lib3/Chameleon-2.9.2/src/chameleon/namespaces.py | 9 + lib3/Chameleon-2.9.2/src/chameleon/nodes.py | 210 + lib3/Chameleon-2.9.2/src/chameleon/parser.py | 238 + lib3/Chameleon-2.9.2/src/chameleon/program.py | 38 + lib3/Chameleon-2.9.2/src/chameleon/py25.py | 36 + lib3/Chameleon-2.9.2/src/chameleon/py26.py | 15 + lib3/Chameleon-2.9.2/src/chameleon/tal.py | 479 + lib3/Chameleon-2.9.2/src/chameleon/tales.py | 541 + lib3/Chameleon-2.9.2/src/chameleon/template.py | 332 + lib3/Chameleon-2.9.2/src/chameleon/tests/__init__.py | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-interpolation.txt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-variable-scope.html | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-variable-scope.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/002-repeat-scope.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/002.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/003-content.pt | 17 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/003.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/004-attributes.pt | 18 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/004.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/005-default.pt | 12 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/005.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/006-attribute-interpolation.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/006.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/007-content-interpolation.pt | 15 + 
lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/007.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/008-builtins.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/008.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/009-literals.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/009.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/010-structure.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/010.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/011-messages.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/011.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/012-translation.pt | 21 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/012.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/013-repeat-nested.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/013.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/014-repeat-nested-similar.pt | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/014.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/015-translation-nested.pt | 10 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/015.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/016-explicit-translation.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/016.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/017-omit-tag.pt | 12 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/017.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/018-translation-nested-dynamic.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/018.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/019-replace.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/019.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/020-on-error.pt | 10 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/020.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/021-translation-domain.pt | 16 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/021.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/022-switch.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/022.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/023-condition.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/023.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/024-namespace-elements.pt | 16 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/024.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/025-repeat-whitespace.pt | 14 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/025.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/026-repeat-variable.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/026.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/027-attribute-replacement.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/027.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/028-attribute-toggle.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/028.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/029-attribute-ordering.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/029.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/030-repeat-tuples.pt | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/030.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/031-namespace-with-tal.pt | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/031.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/032-master-template.pt | 20 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/032.xml | 5 + 
lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/033-use-macro-trivial.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/033.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/034-use-template-as-macro.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/034.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/035-use-macro-with-fill-slot.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/035.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/036-use-macro-inherits-dynamic-scope.pt | 2 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/036.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/037-use-macro-local-variable-scope.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/037.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/038-use-macro-globals.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/038.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/039-globals.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/039.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/040-macro-using-template-symbol.pt | 20 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/040.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/041-translate-nested-names.pt | 22 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/041.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/042-use-macro-fill-footer.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/042.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/043-macro-nested-dynamic-vars.pt | 19 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/043.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/044-tuple-define.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/044.xml | 10 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/045-namespaces.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/045.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/046-extend-macro.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/046.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/047-use-extended-macro.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/047.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/048-use-extended-macro-fill-original.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/048.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/049-entities-in-attributes.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/049.xml | Bin lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/050-define-macro-and-use-not-extend.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/050.xml | Bin lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/051-use-non-extended-macro.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/051.xml | Bin lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/052-i18n-domain-inside-filled-slot.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/052.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/053-special-characters-in-attributes.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/053.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/054-import-expression.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/054.xml | 10 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/055-attribute-fallback-to-dict-lookup.pt | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/055.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/056-comment-attribute.pt | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/056.xml | 4 + 
lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/057-order.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/057.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/058-script.pt | 16 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/058.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/059-embedded-javascript.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/059.xml | 10 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/060-macro-with-multiple-same-slots.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/060.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/061-fill-one-slot-but-two-defined.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/061.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/062-comments-and-expressions.pt | 27 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/062.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/063-continuation.pt | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/063.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/064-tags-and-special-characters.pt | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/064.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/065-use-macro-in-fill.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/065.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/066-load-expression.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/066.xml | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/067-attribute-decode.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/067.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/068-less-than-greater-than-in-attributes.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/068.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/069-translation-domain-and-macro.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/069.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/070-translation-domain-and-use-macro.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/070.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/071-html-attribute-defaults.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/071.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/072-repeat-interpolation.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/072.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/073-utf8-encoded.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/073.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/074-encoded-template.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/074.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/075-nested-macros.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/075.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/076-nested-macro-override.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/076.xml | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/077-i18n-attributes.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/077.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/078-tags-and-newlines.pt | 23 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/078.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/079-implicit-i18n.pt | 16 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/079.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/080-xmlns-namespace-on-tal.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/080.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/081-load-spec.pt | 1 + 
lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/081.xml | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/082-load-spec-computed.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/082.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/083-template-dict-to-macro.pt | 2 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/083.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/084-interpolation-in-cdata.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/084.xml | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/085-nested-translation.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/085.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/086-self-closing.pt | 10 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/086.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/087-code-blocks.pt | 28 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/087.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/088-python-newlines.pt | 2 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/088.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/089.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/090.xml | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/091.xml | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/092.xml | 10 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/093.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/094.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/095.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/096.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/097.xml | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/098.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/099.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/100.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/101-unclosed-tags.html | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/101.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/102-unquoted-attributes.html | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/102.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/103-simple-attribute.html | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/103.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/104.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/105.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/106.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/107.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/108.xml | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/109.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/110.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/111.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/112.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/113.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/114.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/115.xml | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/116.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/117.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/118.xml | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/119.xml | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/greeting.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/hello_world.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/hello_world.txt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.html | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.pt | 9 + 
lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.txt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/002.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/003.pt | 17 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/004.pt | 18 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/005.pt | 12 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/006.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/007.pt | 14 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/008.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/009.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/010.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/011-en.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/011.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/012-en.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/012.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/013.pt | 22 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/014.pt | 12 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/015-en.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/015.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/016-en.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/016.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/017.pt | 12 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/018-en.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/018.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/019.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/020.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/021-en.pt | 12 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/021.pt | 12 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/022.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/023.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/024.pt | 14 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/025.pt | 23 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/026.pt | 17 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/027.pt | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/028.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/029.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/030.pt | 10 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/031.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/032.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/033.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/034.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/035.pt | 17 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/036.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/037.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/038.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/039.pt | 0 lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/040.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/041.pt | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/042.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/043.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/044.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/045.pt | 12 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/046.pt | 17 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/047.pt | 17 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/048.pt | 17 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/049.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/050.pt | 15 + 
lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/051.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/052.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/053.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/054.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/055.pt | 4 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/056.pt | 7 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/057.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/058.pt | 16 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/059.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/060.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/061.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/062.pt | 27 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/063.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/064.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/065.pt | 13 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/066.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/067.pt | 6 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/068.pt | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/069-en.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/069.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/070-en.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/070.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/071.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/072.pt | 19 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/073.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/074.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/075.pt | 19 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/076.pt | 17 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/077-en.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/077.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/078.pt | 11 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/079-en.pt | 16 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/079.pt | 16 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/080.pt | 3 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/081.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/082.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/083.pt | 15 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/084.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/085-en.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/085.pt | 9 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/086.pt | 18 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/087.pt | 25 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/088.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/101.html | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/102.html | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/103.html | 8 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/greeting.pt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/hello_world.pt | 5 + lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/hello_world.txt | 1 + lib3/Chameleon-2.9.2/src/chameleon/tests/test_doctests.py | 40 + lib3/Chameleon-2.9.2/src/chameleon/tests/test_loader.py | 79 + lib3/Chameleon-2.9.2/src/chameleon/tests/test_parser.py | 92 + lib3/Chameleon-2.9.2/src/chameleon/tests/test_sniffing.py | 124 + lib3/Chameleon-2.9.2/src/chameleon/tests/test_templates.py | 679 + lib3/Chameleon-2.9.2/src/chameleon/tests/test_tokenizer.py | 47 + lib3/Chameleon-2.9.2/src/chameleon/tokenize.py | 144 + 
lib3/Chameleon-2.9.2/src/chameleon/utils.py | 429 + lib3/Chameleon-2.9.2/src/chameleon/zpt/__init__.py | 1 + lib3/Chameleon-2.9.2/src/chameleon/zpt/loader.py | 30 + lib3/Chameleon-2.9.2/src/chameleon/zpt/program.py | 751 + lib3/Chameleon-2.9.2/src/chameleon/zpt/template.py | 409 + lib3/Chameleon-2.9.2/src/ordereddict.py | 127 + lib3/Chameleon-2.9.2/src/pkg_resources.py | 2838 ++++++ lib3/Chameleon-2.9.2/tox.ini | 53 + lib3/Mako-0.7.3/CHANGES | 847 + lib3/Mako-0.7.3/LICENSE | 20 + lib3/Mako-0.7.3/MANIFEST.in | 11 + lib3/Mako-0.7.3/Mako.egg-info/PKG-INFO | 71 + lib3/Mako-0.7.3/Mako.egg-info/SOURCES.txt | 171 + lib3/Mako-0.7.3/Mako.egg-info/dependency_links.txt | 1 + lib3/Mako-0.7.3/Mako.egg-info/entry_points.txt | 14 + lib3/Mako-0.7.3/Mako.egg-info/not-zip-safe | 1 + lib3/Mako-0.7.3/Mako.egg-info/requires.txt | 4 + lib3/Mako-0.7.3/Mako.egg-info/top_level.txt | 1 + lib3/Mako-0.7.3/PKG-INFO | 71 + lib3/Mako-0.7.3/README.rst | 52 + lib3/Mako-0.7.3/distribute_setup.py | 485 + lib3/Mako-0.7.3/doc/_sources/caching.txt | 393 + lib3/Mako-0.7.3/doc/_sources/defs.txt | 622 + lib3/Mako-0.7.3/doc/_sources/filtering.txt | 344 + lib3/Mako-0.7.3/doc/_sources/index.txt | 22 + lib3/Mako-0.7.3/doc/_sources/inheritance.txt | 534 + lib3/Mako-0.7.3/doc/_sources/namespaces.txt | 349 + lib3/Mako-0.7.3/doc/_sources/runtime.txt | 448 + lib3/Mako-0.7.3/doc/_sources/syntax.txt | 469 + lib3/Mako-0.7.3/doc/_sources/unicode.txt | 345 + lib3/Mako-0.7.3/doc/_sources/usage.txt | 520 + lib3/Mako-0.7.3/doc/_static/basic.css | 540 + lib3/Mako-0.7.3/doc/_static/comment-bright.png | Bin lib3/Mako-0.7.3/doc/_static/comment-close.png | Bin lib3/Mako-0.7.3/doc/_static/comment.png | Bin lib3/Mako-0.7.3/doc/_static/default.css | 256 + lib3/Mako-0.7.3/doc/_static/docs.css | 438 + lib3/Mako-0.7.3/doc/_static/doctools.js | 247 + lib3/Mako-0.7.3/doc/_static/down-pressed.png | Bin lib3/Mako-0.7.3/doc/_static/down.png | Bin lib3/Mako-0.7.3/doc/_static/file.png | Bin lib3/Mako-0.7.3/doc/_static/jquery.js | 154 + lib3/Mako-0.7.3/doc/_static/makoLogo.png | Bin lib3/Mako-0.7.3/doc/_static/minus.png | Bin lib3/Mako-0.7.3/doc/_static/plus.png | Bin lib3/Mako-0.7.3/doc/_static/pygments.css | 62 + lib3/Mako-0.7.3/doc/_static/searchtools.js | 560 + lib3/Mako-0.7.3/doc/_static/sidebar.js | 151 + lib3/Mako-0.7.3/doc/_static/site.css | 86 + lib3/Mako-0.7.3/doc/_static/underscore.js | 23 + lib3/Mako-0.7.3/doc/_static/up-pressed.png | Bin lib3/Mako-0.7.3/doc/_static/up.png | Bin lib3/Mako-0.7.3/doc/_static/websupport.js | 808 + lib3/Mako-0.7.3/doc/build/Makefile | 137 + lib3/Mako-0.7.3/doc/build/builder/__init__.py | 0 lib3/Mako-0.7.3/doc/build/builder/builders.py | 97 + lib3/Mako-0.7.3/doc/build/builder/util.py | 12 + lib3/Mako-0.7.3/doc/build/caching.rst | 393 + lib3/Mako-0.7.3/doc/build/conf.py | 287 + lib3/Mako-0.7.3/doc/build/defs.rst | 622 + lib3/Mako-0.7.3/doc/build/filtering.rst | 344 + lib3/Mako-0.7.3/doc/build/index.rst | 22 + lib3/Mako-0.7.3/doc/build/inheritance.rst | 534 + lib3/Mako-0.7.3/doc/build/namespaces.rst | 349 + lib3/Mako-0.7.3/doc/build/runtime.rst | 448 + lib3/Mako-0.7.3/doc/build/static/docs.css | 438 + lib3/Mako-0.7.3/doc/build/static/makoLogo.png | Bin lib3/Mako-0.7.3/doc/build/static/site.css | 86 + lib3/Mako-0.7.3/doc/build/syntax.rst | 469 + lib3/Mako-0.7.3/doc/build/templates/base.mako | 60 + lib3/Mako-0.7.3/doc/build/templates/genindex.mako | 77 + lib3/Mako-0.7.3/doc/build/templates/layout.mako | 199 + lib3/Mako-0.7.3/doc/build/templates/page.mako | 2 + lib3/Mako-0.7.3/doc/build/templates/rtd_layout.mako | 174 + 
lib3/Mako-0.7.3/doc/build/templates/search.mako | 25 + lib3/Mako-0.7.3/doc/build/unicode.rst | 345 + lib3/Mako-0.7.3/doc/build/usage.rst | 520 + lib3/Mako-0.7.3/doc/caching.html | 779 + lib3/Mako-0.7.3/doc/defs.html | 728 + lib3/Mako-0.7.3/doc/filtering.html | 478 + lib3/Mako-0.7.3/doc/genindex.html | 916 ++ lib3/Mako-0.7.3/doc/index.html | 230 + lib3/Mako-0.7.3/doc/inheritance.html | 673 + lib3/Mako-0.7.3/doc/namespaces.html | 649 + lib3/Mako-0.7.3/doc/runtime.html | 710 + lib3/Mako-0.7.3/doc/search.html | 162 + lib3/Mako-0.7.3/doc/searchindex.js | 1 + lib3/Mako-0.7.3/doc/syntax.html | 596 + lib3/Mako-0.7.3/doc/unicode.html | 476 + lib3/Mako-0.7.3/doc/usage.html | 1057 ++ lib3/Mako-0.7.3/examples/bench/basic.py | 191 + lib3/Mako-0.7.3/examples/bench/cheetah/footer.tmpl | 2 + lib3/Mako-0.7.3/examples/bench/cheetah/header.tmpl | 5 + lib3/Mako-0.7.3/examples/bench/cheetah/template.tmpl | 31 + lib3/Mako-0.7.3/examples/bench/django/templatetags/__init__.py | 0 lib3/Mako-0.7.3/examples/bench/django/templatetags/bench.py | 8 + lib3/Mako-0.7.3/examples/bench/kid/base.kid | 15 + lib3/Mako-0.7.3/examples/bench/kid/template.kid | 22 + lib3/Mako-0.7.3/examples/bench/myghty/base.myt | 29 + lib3/Mako-0.7.3/examples/bench/myghty/template.myt | 30 + lib3/Mako-0.7.3/examples/wsgi/run_wsgi.py | 78 + lib3/Mako-0.7.3/mako/__init__.py | 9 + lib3/Mako-0.7.3/mako/_ast_util.py | 839 + lib3/Mako-0.7.3/mako/ast.py | 151 + lib3/Mako-0.7.3/mako/cache.py | 236 + lib3/Mako-0.7.3/mako/codegen.py | 1215 ++ lib3/Mako-0.7.3/mako/exceptions.py | 362 + lib3/Mako-0.7.3/mako/ext/__init__.py | 0 lib3/Mako-0.7.3/mako/ext/autohandler.py | 65 + lib3/Mako-0.7.3/mako/ext/babelplugin.py | 132 + lib3/Mako-0.7.3/mako/ext/beaker_cache.py | 70 + lib3/Mako-0.7.3/mako/ext/preprocessors.py | 20 + lib3/Mako-0.7.3/mako/ext/pygmentplugin.py | 122 + lib3/Mako-0.7.3/mako/ext/turbogears.py | 57 + lib3/Mako-0.7.3/mako/filters.py | 191 + lib3/Mako-0.7.3/mako/lexer.py | 442 + lib3/Mako-0.7.3/mako/lookup.py | 354 + lib3/Mako-0.7.3/mako/parsetree.py | 594 + lib3/Mako-0.7.3/mako/pygen.py | 283 + lib3/Mako-0.7.3/mako/pyparser.py | 551 + lib3/Mako-0.7.3/mako/runtime.py | 842 + lib3/Mako-0.7.3/mako/template.py | 650 + lib3/Mako-0.7.3/mako/util.py | 437 + lib3/Mako-0.7.3/scripts/mako-render | 46 + lib3/Mako-0.7.3/setup.cfg | 5 + lib3/Mako-0.7.3/setup.py | 62 + lib3/Mako-0.7.3/test/__init__.py | 145 + lib3/Mako-0.7.3/test/foo/__init__.py | 0 lib3/Mako-0.7.3/test/foo/test_ns.py | 7 + lib3/Mako-0.7.3/test/sample_module_namespace.py | 7 + lib3/Mako-0.7.3/test/templates/badbom.html | 2 + lib3/Mako-0.7.3/test/templates/bom.html | 1 + lib3/Mako-0.7.3/test/templates/bommagic.html | 2 + lib3/Mako-0.7.3/test/templates/chs_unicode.html | 11 + lib3/Mako-0.7.3/test/templates/chs_unicode_py3k.html | 11 + lib3/Mako-0.7.3/test/templates/chs_utf8.html | 17 + lib3/Mako-0.7.3/test/templates/crlf.html | 19 + lib3/Mako-0.7.3/test/templates/foo/modtest.html.py | 25 + lib3/Mako-0.7.3/test/templates/gettext.mako | 89 + lib3/Mako-0.7.3/test/templates/index.html | 1 + lib3/Mako-0.7.3/test/templates/internationalization.html | 920 ++ lib3/Mako-0.7.3/test/templates/modtest.html | 1 + lib3/Mako-0.7.3/test/templates/othersubdir/foo.html | 0 lib3/Mako-0.7.3/test/templates/read_unicode.html | 10 + lib3/Mako-0.7.3/test/templates/read_unicode_py3k.html | 10 + lib3/Mako-0.7.3/test/templates/runtimeerr.html | 4 + lib3/Mako-0.7.3/test/templates/runtimeerr_py3k.html | 4 + lib3/Mako-0.7.3/test/templates/subdir/foo/modtest.html.py | 25 + lib3/Mako-0.7.3/test/templates/subdir/incl.html | 2 + 
lib3/Mako-0.7.3/test/templates/subdir/index.html | 3 + lib3/Mako-0.7.3/test/templates/subdir/modtest.html | 1 + lib3/Mako-0.7.3/test/templates/unicode.html | 2 + lib3/Mako-0.7.3/test/templates/unicode_arguments.html | 10 + lib3/Mako-0.7.3/test/templates/unicode_arguments_py3k.html | 10 + lib3/Mako-0.7.3/test/templates/unicode_code.html | 7 + lib3/Mako-0.7.3/test/templates/unicode_code_py3k.html | 7 + lib3/Mako-0.7.3/test/templates/unicode_expr.html | 2 + lib3/Mako-0.7.3/test/templates/unicode_expr_py3k.html | 2 + lib3/Mako-0.7.3/test/templates/unicode_runtime_error.html | 2 + lib3/Mako-0.7.3/test/templates/unicode_syntax_error.html | 2 + lib3/Mako-0.7.3/test/test_ast.py | 334 + lib3/Mako-0.7.3/test/test_babelplugin.py | 44 + lib3/Mako-0.7.3/test/test_block.py | 569 + lib3/Mako-0.7.3/test/test_cache.py | 557 + lib3/Mako-0.7.3/test/test_call.py | 515 + lib3/Mako-0.7.3/test/test_decorators.py | 110 + lib3/Mako-0.7.3/test/test_def.py | 678 + lib3/Mako-0.7.3/test/test_exceptions.py | 299 + lib3/Mako-0.7.3/test/test_filters.py | 335 + lib3/Mako-0.7.3/test/test_inheritance.py | 350 + lib3/Mako-0.7.3/test/test_lexer.py | 871 + lib3/Mako-0.7.3/test/test_lookup.py | 104 + lib3/Mako-0.7.3/test/test_loop.py | 295 + lib3/Mako-0.7.3/test/test_lru.py | 111 + lib3/Mako-0.7.3/test/test_namespace.py | 792 + lib3/Mako-0.7.3/test/test_pygen.py | 252 + lib3/Mako-0.7.3/test/test_template.py | 1242 ++ lib3/Mako-0.7.3/test/test_tgplugin.py | 42 + lib3/Mako-0.7.3/test/test_util.py | 50 + lib3/Mako-0.7.3/test/util.py | 7 + lib3/mako-0.3.6/.hgignore | 7 + lib3/mako-0.3.6/CHANGES | 590 + lib3/mako-0.3.6/LICENSE | 20 + lib3/mako-0.3.6/MANIFEST.in | 12 + lib3/mako-0.3.6/README | 25 + lib3/mako-0.3.6/README.py3k | 56 + lib3/mako-0.3.6/distribute_setup.py | 485 + lib3/mako-0.3.6/doc/build/Makefile | 143 + lib3/mako-0.3.6/doc/build/builder/builders.py | 69 + lib3/mako-0.3.6/doc/build/builder/util.py | 12 + lib3/mako-0.3.6/doc/build/caching.rst | 127 + lib3/mako-0.3.6/doc/build/conf.py | 280 + lib3/mako-0.3.6/doc/build/defs.rst | 436 + lib3/mako-0.3.6/doc/build/filtering.rst | 340 + lib3/mako-0.3.6/doc/build/index.rst | 21 + lib3/mako-0.3.6/doc/build/inheritance.rst | 321 + lib3/mako-0.3.6/doc/build/namespaces.rst | 341 + lib3/mako-0.3.6/doc/build/runtime.rst | 238 + lib3/mako-0.3.6/doc/build/static/docs.css | 288 + lib3/mako-0.3.6/doc/build/syntax.rst | 417 + lib3/mako-0.3.6/doc/build/templates/genindex.mako | 72 + lib3/mako-0.3.6/doc/build/templates/layout.mako | 130 + lib3/mako-0.3.6/doc/build/templates/page.mako | 2 + lib3/mako-0.3.6/doc/build/templates/search.mako | 22 + lib3/mako-0.3.6/doc/build/templates/site_base.mako | 30 + lib3/mako-0.3.6/doc/build/templates/static_base.mako | 19 + lib3/mako-0.3.6/doc/build/unicode.rst | 337 + lib3/mako-0.3.6/doc/build/usage.rst | 484 + lib3/mako-0.3.6/examples/bench/basic.py | 161 + lib3/mako-0.3.6/examples/bench/basic.py.orig | 161 + lib3/mako-0.3.6/examples/bench/cheetah/footer.tmpl | 2 + lib3/mako-0.3.6/examples/bench/cheetah/header.tmpl | 5 + lib3/mako-0.3.6/examples/bench/cheetah/template.tmpl | 31 + lib3/mako-0.3.6/examples/bench/django/templates/base.html | 14 + lib3/mako-0.3.6/examples/bench/django/templates/template.html | 22 + lib3/mako-0.3.6/examples/bench/django/templatetags/bench.py | 8 + lib3/mako-0.3.6/examples/bench/genshi/base.html | 17 + lib3/mako-0.3.6/examples/bench/genshi/template.html | 24 + lib3/mako-0.3.6/examples/bench/kid/base.kid | 15 + lib3/mako-0.3.6/examples/bench/kid/template.kid | 22 + lib3/mako-0.3.6/examples/bench/mako/footer.html | 2 + 
lib3/mako-0.3.6/examples/bench/mako/header.html | 5 + lib3/mako-0.3.6/examples/bench/mako/template.html | 31 + lib3/mako-0.3.6/examples/bench/mako_inheritance/base.html | 24 + lib3/mako-0.3.6/examples/bench/mako_inheritance/template.html | 15 + lib3/mako-0.3.6/examples/bench/myghty/base.myt | 29 + lib3/mako-0.3.6/examples/bench/myghty/template.myt | 30 + lib3/mako-0.3.6/examples/wsgi/htdocs/index.html | 8 + lib3/mako-0.3.6/examples/wsgi/run_wsgi.py | 78 + lib3/mako-0.3.6/examples/wsgi/templates/root.html | 7 + lib3/mako-0.3.6/mako/__init__.py | 9 + lib3/mako-0.3.6/mako/_ast_util.py | 834 + lib3/mako-0.3.6/mako/ast.py | 143 + lib3/mako-0.3.6/mako/cache.py | 118 + lib3/mako-0.3.6/mako/codegen.py | 958 ++ lib3/mako-0.3.6/mako/exceptions.py | 309 + lib3/mako-0.3.6/mako/ext/autohandler.py | 59 + lib3/mako-0.3.6/mako/ext/babelplugin.py | 123 + lib3/mako-0.3.6/mako/ext/preprocessors.py | 14 + lib3/mako-0.3.6/mako/ext/pygmentplugin.py | 101 + lib3/mako-0.3.6/mako/ext/turbogears.py | 50 + lib3/mako-0.3.6/mako/filters.py | 189 + lib3/mako-0.3.6/mako/lexer.py | 415 + lib3/mako-0.3.6/mako/lookup.py | 321 + lib3/mako-0.3.6/mako/parsetree.py | 497 + lib3/mako-0.3.6/mako/pygen.py | 285 + lib3/mako-0.3.6/mako/pyparser.py | 533 + lib3/mako-0.3.6/mako/runtime.py | 651 + lib3/mako-0.3.6/mako/template.py | 510 + lib3/mako-0.3.6/mako/util.py | 315 + lib3/mako-0.3.6/scripts/mako-render | 38 + lib3/mako-0.3.6/setup.cfg | 2 + lib3/mako-0.3.6/setup.py | 68 + lib3/mako-0.3.6/test/__init__.py | 93 + lib3/mako-0.3.6/test/foo/test_ns.py | 7 + lib3/mako-0.3.6/test/sample_module_namespace.py | 7 + lib3/mako-0.3.6/test/templates/badbom.html | 2 + lib3/mako-0.3.6/test/templates/bom.html | 1 + lib3/mako-0.3.6/test/templates/bommagic.html | 2 + lib3/mako-0.3.6/test/templates/chs_unicode.html | 11 + lib3/mako-0.3.6/test/templates/chs_unicode_py3k.html | 11 + lib3/mako-0.3.6/test/templates/chs_utf8.html | 17 + lib3/mako-0.3.6/test/templates/crlf.html | 19 + lib3/mako-0.3.6/test/templates/foo/modtest.html.py | 25 + lib3/mako-0.3.6/test/templates/gettext.mako | 83 + lib3/mako-0.3.6/test/templates/index.html | 1 + lib3/mako-0.3.6/test/templates/internationalization.html | 920 ++ lib3/mako-0.3.6/test/templates/modtest.html | 1 + lib3/mako-0.3.6/test/templates/read_unicode.html | 10 + lib3/mako-0.3.6/test/templates/read_unicode_py3k.html | 10 + lib3/mako-0.3.6/test/templates/runtimeerr.html | 4 + lib3/mako-0.3.6/test/templates/runtimeerr_py3k.html | 4 + lib3/mako-0.3.6/test/templates/subdir/foo/modtest.html.py | 25 + lib3/mako-0.3.6/test/templates/subdir/incl.html | 2 + lib3/mako-0.3.6/test/templates/subdir/index.html | 3 + lib3/mako-0.3.6/test/templates/subdir/modtest.html | 1 + lib3/mako-0.3.6/test/templates/unicode.html | 2 + lib3/mako-0.3.6/test/templates/unicode_arguments.html | 10 + lib3/mako-0.3.6/test/templates/unicode_arguments_py3k.html | 10 + lib3/mako-0.3.6/test/templates/unicode_code.html | 7 + lib3/mako-0.3.6/test/templates/unicode_code_py3k.html | 7 + lib3/mako-0.3.6/test/templates/unicode_expr.html | 2 + lib3/mako-0.3.6/test/templates/unicode_expr_py3k.html | 2 + lib3/mako-0.3.6/test/templates/unicode_runtime_error.html | 2 + lib3/mako-0.3.6/test/templates/unicode_syntax_error.html | 2 + lib3/mako-0.3.6/test/test_ast.py | 273 + lib3/mako-0.3.6/test/test_babelplugin.py | 42 + lib3/mako-0.3.6/test/test_cache.py | 404 + lib3/mako-0.3.6/test/test_call.py | 447 + lib3/mako-0.3.6/test/test_decorators.py | 110 + lib3/mako-0.3.6/test/test_def.py | 550 + lib3/mako-0.3.6/test/test_exceptions.py | 188 + 
lib3/mako-0.3.6/test/test_filters.py | 290 + lib3/mako-0.3.6/test/test_inheritance.py | 350 + lib3/mako-0.3.6/test/test_lexer.py | 854 + lib3/mako-0.3.6/test/test_lookup.py | 65 + lib3/mako-0.3.6/test/test_lru.py | 111 + lib3/mako-0.3.6/test/test_namespace.py | 792 + lib3/mako-0.3.6/test/test_pygen.py | 252 + lib3/mako-0.3.6/test/test_template.py | 936 ++ lib3/mako-0.3.6/test/test_tgplugin.py | 42 + lib3/mako-0.3.6/test/util.py | 7 + lib3/pkg_resources.py | 2838 ------ make_perf3.sh | 65 - perf.py | 14 +- 790 files changed, 100762 insertions(+), 2908 deletions(-) diff --git a/lib3/2to3/2to3 b/lib3/2to3/2to3 new file mode 100755 --- /dev/null +++ b/lib3/2to3/2to3 @@ -0,0 +1,5 @@ +#!/usr/bin/env python +import sys +from lib2to3.main import main + +sys.exit(main("lib2to3.fixes")) diff --git a/lib3/2to3/HACKING b/lib3/2to3/HACKING new file mode 100644 --- /dev/null +++ b/lib3/2to3/HACKING @@ -0,0 +1,49 @@ +Tips/tricks/hints for writing new fixers: + + * Don't write your own PATTERN from scratch; that's what + scripts/find_pattern.py is for. + + * If your fixer works by changing a node's children list or a leaf's value, + be sure to call the node/leaf's changed() method. This to be sure the main + script will recognize that the tree has changed. + + +Putting 2to3 to work somewhere else: + + * By default, 2to3 uses a merger of Python 2.x and Python 3's grammars. If + you want to support a different grammar, just replace the Grammar.txt file + with Grammar/Grammar from your chosen Python version. + + * The real heart of 2to3 is the concrete syntax tree parser in pgen2; this + chunk of the system is suitable for a wide range of applications that + require CST transformation. All that's required is to rip off the fixer + layer and replace it with something else that walks the tree. One + application would be a tool to check/enforce style guidelines; this could + leverage 90% of the existing infrastructure with primarily cosmetic + changes (e.g., fixes/fix_*.py -> styles/style_*.py). + + +TODO + + Simple: + ####### + + * Refactor common code out of fixes/fix_*.py into fixer_util (on-going). + + * Document how to write fixers. + + + Complex: + ######## + + * Come up with a scheme to hide the details of suite indentation (some kind + of custom pytree node for suites, probably). This will automatically + reindent all code with spaces, tied into a refactor.py flag that allows + you to specify the indent level. + + * Remove the need to explicitly assign a node's parent attribute. This + could be gone with a magic children list. + + * Import statements are complicated and a pain to handle, and there are many + fixers that manipulate them. It would be nice to have a little API for + manipulating imports in fixers. diff --git a/lib3/2to3/README b/lib3/2to3/README new file mode 100644 --- /dev/null +++ b/lib3/2to3/README @@ -0,0 +1,255 @@ +Abstract +======== + +A refactoring tool for converting Python 2.x code to 3.x. + +This is a work in progress! Bugs should be reported to http://bugs.python.org/ +under the "2to3" category. + + +General usage +============= + +Run ``./2to3`` to convert stdin (``-``), files or directories given as +arguments. + +2to3 must be run with at least Python 2.5. The intended path for migrating to +Python 3.x is to first migrate to 2.6 (in order to take advantage of Python +2.6's runtime compatibility checks). 
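The same conversion can also be driven from Python code through the bundled
``lib2to3`` package.  The snippet below is only a minimal sketch, not something
shipped in this tree; it assumes ``lib2to3`` (with its standard fixer package
``lib2to3.fixes``) is importable::

    from lib2to3.refactor import RefactoringTool, get_fixers_from_package

    # Collect the names of all default fixers and build a tool around them.
    fixers = get_fixers_from_package("lib2to3.fixes")
    tool = RefactoringTool(fixers)

    # refactor_string() expects source text ending in a newline and returns
    # the refactored parse tree; str() turns the tree back into source code.
    source = "print 'hello'\nif d.has_key(1):\n    pass\n"
    tree = tool.refactor_string(source, "<example>")
    print(str(tree))   # print('hello') / if 1 in d: pass

A tool built this way applies the same default fixers as the ``./2to3``
script, so its output should match what the command line produces for the
same input.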
+ + +Files +===== + +README - this file +lib2to3/refactor.py - main program; use this to convert files or directory trees +test.py - runs all unittests for 2to3 +lib2to3/patcomp.py - pattern compiler +lib2to3/pytree.py - parse tree nodes (not specific to Python, despite the name!) +lib2to3/pygram.py - code specific to the Python grammar +scripts/example.py - example input for play.py and fix_*.py +scripts/find_pattern.py - script to help determine the PATTERN for a new fix +lib2to3/Grammar.txt - Python grammar input (accepts 2.x and 3.x syntax) +lib2to3/Grammar.pickle - pickled grammar tables (generated file, not in subversion) +lib2to3/PatternGrammar.txt - grammar for the pattern language used by patcomp.py +lib2to3/PatternGrammar.pickle - pickled pattern grammar tables (generated file) +lib2to3/pgen2/ - Parser generator and driver ([1]_, [2]_) +lib2to3/fixes/ - Individual transformations +lib2to3/tests/ - Test files for pytree, fixers, grammar, etc + + +Capabilities +============ + +A quick run-through of 2to3's current fixers: + +* **fix_apply** - convert apply() calls to real function calls. + +* **fix_callable** - converts callable(obj) into hasattr(obj, '__call__'). + +* **fix_dict** - fix up dict.keys(), .values(), .items() and their iterator + versions. + +* **fix_except** - adjust "except" statements to Python 3 syntax (PEP 3110). + +* **fix_exec** - convert "exec" statements to exec() function calls. + +* **fix_execfile** - execfile(filename, ...) -> exec(open(filename).read()) + +* **fix_filter** - changes filter(F, X) into list(filter(F, X)). + +* **fix_funcattrs** - fix function attribute names (f.func_x -> f.__x__). + +* **fix_has_key** - "d.has_key(x)" -> "x in d". + +* **fix_idioms** - convert type(x) == T to isinstance(x, T), "while 1:" to + "while True:", plus others. This fixer must be explicitly requested + with "-f idioms". + +* **fix_imports** - Fix (some) incompatible imports. + +* **fix_imports2** - Fix (some) incompatible imports that must run after + **test_imports**. + +* **fix_input** - "input()" -> "eval(input())" (PEP 3111). + +* **fix_intern** - "intern(x)" -> "sys.intern(x)". + +* **fix_long** - remove all usage of explicit longs in favor of ints. + +* **fix_map** - generally changes map(F, ...) into list(map(F, ...)). + +* **fix_ne** - convert the "<>" operator to "!=". + +* **fix_next** - fixer for it.next() -> next(it) (PEP 3114). + +* **fix_nonzero** - convert __nonzero__() methods to __bool__() methods. + +* **fix_numliterals** - tweak certain numeric literals to be 3.0-compliant. + +* **fix_paren** - Add parentheses to places where they are needed in list + comprehensions and generator expressions. + +* **fix_operator** - fixer for functions gone from the operator module. + +* **fix_print** - convert "print" statements to print() function calls. + +* **fix_raise** - convert "raise" statements to Python 3 syntax (PEP 3109). + +* **fix_raw_input** - "raw_input()" -> "input()" (PEP 3111). + +* **fix_repr** - swap backticks for repr() calls. + +* **fix_standarderror** - StandardError -> Exception. + +* **fix_sys_exc** - Converts * **"sys.exc_info", "sys.exc_type", and + "sys.exc_value" to sys.exc_info() + +* **fix_throw** - fix generator.throw() calls to be 3.0-compliant (PEP 3109). + +* **fix_tuple_params** - remove tuple parameters from function, method and + lambda declarations (PEP 3113). + +* **fix_unicode** - convert, e.g., u"..." to "...", unicode(x) to str(x), etc. + +* **fix_urllib** - Fix imports for urllib and urllib2. 
+ +* **fix_xrange** - "xrange()" -> "range()". + +* **fix_xreadlines** - "for x in f.xreadlines():" -> "for x in f:". Also, + "g(f.xreadlines)" -> "g(f.__iter__)". + +* **fix_metaclass** - move __metaclass__ = M to class X(metaclass=M) + + +Limitations +=========== + +General Limitations +------------------- + +* In general, fixers that convert a function or method call will not detect + something like :: + + a = apply + a(f, *args) + + or :: + + m = d.has_key + if m(5): + ... + +* Fixers that look for attribute references will not detect when getattr() or + setattr() is used to access those attributes. + +* The contents of eval() calls and "exec" statements will not be checked by + 2to3. + + +Caveats for Specific Fixers +--------------------------- + +fix_except +'''''''''' + +"except" statements like :: + + except Exception, (a, b): + ... + +are not fixed up. The ability to treat exceptions as sequences is being +removed in Python 3, so there is no straightforward, automatic way to +adjust these statements. + +This is seen frequently when dealing with OSError. + + +fix_filter +'''''''''' + +The transformation is not correct if the original code depended on +filter(F, X) returning a string if X is a string (or a tuple if X is a +tuple, etc). That would require type inference, which we don't do. Python +2.6's Python 3 compatibility mode should be used to detect such cases. + + +fix_has_key +''''''''''' + +While the primary target of this fixer is dict.has_key(), the +fixer will change any has_key() method call, regardless of what class it +belongs to. Anyone using non-dictionary classes with has_key() methods is +advised to pay close attention when using this fixer. + + +fix_map +''''''' + +The transformation is not correct if the original code was depending on +map(F, X, Y, ...) to go on until the longest argument is exhausted, +substituting None for missing values -- like zip(), it now stops as +soon as the shortest argument is exhausted. + + +fix_raise +''''''''' + +"raise E, V" will be incorrectly translated if V is an exception instance. +The correct Python 3 idiom is :: + + raise E from V + +but since we can't detect instance-hood by syntax alone and since any client +code would have to be changed as well, we don't automate this. + +Another translation problem is this: :: + + t = ((E, E2), E3) + raise t + +2to3 has no way of knowing that t is a tuple, and so this code will raise an +exception at runtime since the ability to raise tuples is going away. + + +Notes +===== + +.. [#1] I modified tokenize.py to yield a NL pseudo-token for backslash + continuations, so the original source can be reproduced exactly. The + modified version can be found at lib2to3/pgen2/tokenize.py. + +.. [#2] I developed pgen2 while I was at Elemental Security. I modified + it while at Google to suit the needs of this refactoring tool. + + +Development +=========== + +The HACKING file has a list of TODOs -- some simple, some complex -- that would +make good introductions for anyone new to 2to3. + + +Licensing +========= + +The original pgen2 module is copyrighted by Elemental Security. All +new code I wrote specifically for this tool is copyrighted by Google. +New code by others is copyrighted by the respective authors. All code +(whether by me or by others) is licensed to the PSF under a contributor +agreement. + +--Guido van Rossum + + +All code I wrote specifically for this tool before 9 April 2007 is +copyrighted by me. 
All new code I wrote specifically for this tool after +9 April 2007 is copyrighted by Google. Regardless, my contributions are +licensed to the PSF under a contributor agreement. + +--Collin Winter + +All of my contributions are copyrighted to me and licensed to PSF under the +Python contributor agreement. + +--Benjamin Peterson diff --git a/lib3/2to3/example.py b/lib3/2to3/example.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/example.py @@ -0,0 +1,405 @@ +#!/usr/bin/python + # comment indented by tab + +"""Docstring. + +Here are some doctest exampes: + +>>> print 42 +42 + + >>> d = {1: 1, 2: 2, 2: 2} + >>> d.keys().sort() + >>> print d + {1: 1, 2: 2} + + >>> for i in d.keys(): + ... print i, d[i] + +And a tricky one: + +>>> class X(Structure): +... _fields_ = [("x", c_int), ("y", c_int), ("array", c_char_p * 5)] +... +>>> x = X() +>>> print x._objects +None +>>> + +""" + +import sys + +def unicode_examples(): + a = str(b) + a = "xxx" + a = """xxx""" + a = r'xxx' + a = R'''xxx''' + a = r"xxx" + a = R"""xxx""" + b = "..." '...' + +def ne_examples(): + if x != y: + pass + if x!=y: + pass + if x!=y!=z: + pass + +def has_key_examples(): + # + x = "x" in d or "y" in d + # + x = ("x" in a.b.c.d) ** 3 + # + x = (1 + 2 in a.b).__repr__() + # + x = (1 + 2 in a.b).__repr__() ** -3 ** 4 + # + x = (f or g) in a + # + x = a + (c in b) + # + x = (lambda: 12) in a + # + x = (a for a in b) in a + # + if b not in a: pass + # + if not (b in a).__repr__(): pass + # + if not (b in a) ** 2: pass + +def foo(): + pass # body indented by tab + +def test_ws_comma(): + yield 1,2 ,3 + f(1,2 ,3) + repr((a ,b)) + def f(a,b ,c): pass + { a:b,c:d , e : f } + +def apply_examples(): + x = f(*g + h) + y = f(*g, **h) + z = fs[0](*g or h, **h or g) + # Hello + f(*(x, y) + t) + f(*args) + f(*args, **kwds) + # Test that complex functions are parenthesized + x = (f+g)(*args) + x = (f*g)(*args) + x = (f**g)(*args) + # But dotted names etc. not + x = f.g(*args) + x = f[x](*args) + x = f()(*args) + # Extreme case + x = a.b.c.d.e.f(*args, **kwds) + # XXX Comments in weird places still get lost + f(*args) + +def bad_apply_examples(): + # These should *not* be touched + apply() + apply(f) + apply(f,) + apply(f, args, kwds, extras) + apply(f, *args, **kwds) + apply(f, *args) + apply(func=f, args=args, kwds=kwds) + apply(f, args=args, kwds=kwds) + apply(f, args, kwds=kwds) + +def metaclass_examples(): + class X(metaclass=Meta): + pass + + class X(b1, b2, metaclass=Meta): + bar = 23 # Comment on me! + spam = 27.23 # Laughable + + class X(metaclass=Meta): + x = 23; y = 34 # Yes, I can handle this, too. 
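# Illustrative aside, not part of the original example file: the classes in
# metaclass_examples() above show the already-converted output.  Under Python 2
# the same classes would plausibly have been written with a class attribute,
# e.g.:
#
#   class X(b1, b2):
#       __metaclass__ = Meta
#       bar = 23          # Comment on me!
#
# and 2to3's fix_metaclass rewrites that attribute into the
# class X(b1, b2, metaclass=Meta) keyword form seen above.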
+ +def intern_examples(): + # + # These should be refactored: + # + x = sys.intern(a) + # + y = sys.intern("b" # test + ) + # + z = sys.intern(a+b+c.d,) + # + sys.intern("y%s" % 5).replace("y", "") + # + # These not: + # + intern(a=1) + # + intern(f, g) + # + intern(*h) + # + intern(**i) + +def print_examples(): + # plain vanilla + print(1, 1+1, 1+1+1) + # + print(1, 2) + # + print(1) + + print() + + # trailing commas + print(1, 2, 3, end=' ') + # + print(1, 2, end=' ') + # + print(1, end=' ') + # + print() + + # >> stuff + print(1, 2, 3, file=sys.stderr) # no trailing comma + # + print(1, 2, end=' ', file=sys.stdder) # trailing comma + # + print(1+1, file=sys.stderr) # no trailing comma + # + print(file=sys.stderr) # spaces before sys.stderr + +def exec_examples(): + # + exec(code) + # + exec(code, ns) + # + exec(code, ns1, ns2) + # + exec((a.b()), ns) + # + exec(a.b() + c, ns) + # + # These should not be touched: + # + exec(code) + # + exec (code) + # + exec(code, ns) + # + exec(code, ns1, ns2) + +def repr_examples(): + x = repr(1 + 2) + # + y = repr(x) + # + z = repr(y).__repr__() + # + x = repr((1, 2, 3)) + # + x = repr(1 + repr(2)) + # + x = repr((1, 2 + repr((3, 4)))) + +def except_examples(): + try: + pass + except Exception as xxx_todo_changeme: + (f, e) = xxx_todo_changeme.args + pass + except ImportError as e: + print(e.args) + # + try: + pass + except (RuntimeError, ImportError) as e: + pass + # + try: + pass + except Exception as xxx_todo_changeme1: + (a, b) = xxx_todo_changeme1.args + pass + # + try: + pass + except Exception as xxx_todo_changeme2: + d[5] = xxx_todo_changeme2 + pass + # + try: + pass + except Exception as xxx_todo_changeme3: + a.foo = xxx_todo_changeme3 + pass + # + try: + pass + except Exception as xxx_todo_changeme4: + a().foo = xxx_todo_changeme4 + pass + # + # These should not be touched: + # + try: + pass + except: + pass + # + try: + pass + except Exception: + pass + # + try: + pass + except (Exception, SystemExit): + pass + +def raise_examples(): + raise Exception(5) + # + raise Exception(5) + # + raise Exception(5, 6, 7) + # + # These should not be touched + # + raise Exception + # + raise Exception(5, 6) + # + # These should produce a warning + # TODO: convert "raise E, V, T" to + # "e = E(V); e.__traceback__ = T; raise e;" + # + raise Exception(5).with_traceback(6) + # + raise Exception(5).with_traceback(6) + # + raise Exception(5, 6, 7).with_traceback(6) + +def long_examples(): + x = int(x) + y = isinstance(x, int) + z = type(x) in (int, int) + a = 12 + b = 0x12 + # unchanged: + a = 12 + b = 0x12 + c = 3.14 + +def dict_examples(): + # + # Plain method calls + # + print(list(d.keys())) + print(list(d.items())) + print(list(d.values())) + # + # Plain method calls in special contexts + # + print(iter(list(e.keys()))) + for i in list(e.keys()): print(i) + [i for i in list(e.keys())] + (i for i in list(e.keys())) + # + # Iterator method calls + # + print(iter(f.keys())) + print(iter(f.items())) + print(iter(f.values())) + # + # Iterator method calls in special contexts + # + print(list(g.keys())) + print(sorted(g.keys())) + print(iter(g.keys())) + for i in g.keys(): print(i) + [i for i in g.keys()] + (i for i in g.keys()) + # + # Examples with a "tail"; these are never "special" + # + print(next(iter(h.keys()))) + print(list(h.keys())[0]) + print(list(next(iter(h.keys())))) + for x in list(h.keys())[0]: print(x) + # + # Examples with dict views + # + print(d.keys()) + print(d.items()) + print(d.values()) + +def dict_negative_examples(): + # + # These 
should all remain unchanged: + # + print(list(h.keys())) + print(sorted(h.keys())) + +def xrange_examples(): + for i in range(100): print(i) + for i in range(0, 100): print(i) + for i in range(0, 100, 10): print(i) + +def input_examples(): + a = eval(input()) + b = eval(input(str(a))) + +def raw_input_examples(): + a = input() + b = input(a.rstrip()) + +def filter_examples(): + list(filter(os.unlink, filenames)) + [_f for _f in "whatever" if _f] + [x for x in range(4) if not x] + +def map_examples(): + list(map(None, foo.bar)) + list(map(None, foo.bar,)) + list(map(None, foo, bar)) + list(map(f, foo.bar)) + list(map(lambda x: x+1, list(range(10)))) + +def basestring_examples(): + if isinstance(x, str): pass + +def buffer_examples(): + x = buffer(y) + +def sys_exc_examples(): + print(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]) + +def operator_examples(): + import operator + hasattr(foo, '__call__') + operator.contains(foo, bar) + + from operator import isCallable, sequenceIncludes + # These should produce warnings. + isCallable(foo) + sequenceIncludes(foo, bar) + +class X: + def maximum(self): + return max(self.data.values()) + def total(self): + return sum(self.data.values()) + + +# This is the last line. diff --git a/lib3/2to3/lib2to3/Grammar.txt b/lib3/2to3/lib2to3/Grammar.txt new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/Grammar.txt @@ -0,0 +1,158 @@ +# Grammar for Python + +# Note: Changing the grammar specified in this file will most likely +# require corresponding changes in the parser module +# (../Modules/parsermodule.c). If you can't make the changes to +# that module yourself, please co-ordinate the required changes +# with someone who can; ask around on python-dev for help. Fred +# Drake will probably be listening there. + +# NOTE WELL: You should also follow all the steps listed in PEP 306, +# "How to Change Python's Grammar" + +# Commands for Kees Blom's railroad program +#diagram:token NAME +#diagram:token NUMBER +#diagram:token STRING +#diagram:token NEWLINE +#diagram:token ENDMARKER +#diagram:token INDENT +#diagram:output\input python.bla +#diagram:token DEDENT +#diagram:output\textwidth 20.04cm\oddsidemargin 0.0cm\evensidemargin 0.0cm +#diagram:rules + +# Start symbols for the grammar: +# file_input is a module or sequence of commands read from an input file; +# single_input is a single interactive statement; +# eval_input is the input for the eval() and input() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! 
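# Illustrative aside, not part of the grammar itself: pgen2 builds its parse
# tables from this file, and the resulting grammar can be exercised from
# Python roughly as follows (assuming the bundled lib2to3 package is
# importable):
#
#   from lib2to3 import pygram, pytree
#   from lib2to3.pgen2 import driver
#
#   d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
#   tree = d.parse_string("x = 1\n")  # parsed via the file_input start symbol
#   print(str(tree))                  # str(tree) reproduces the source text
#
# The start symbols described above are defined first, immediately below.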
+file_input: (NEWLINE | stmt)* ENDMARKER +single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +eval_input: testlist NEWLINE* ENDMARKER + +decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef) +funcdef: 'def' NAME parameters ['->' test] ':' suite +parameters: '(' [typedargslist] ')' +typedargslist: ((tfpdef ['=' test] ',')* + ('*' [tname] (',' tname ['=' test])* [',' '**' tname] | '**' tname) + | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) +tname: NAME [':' test] +tfpdef: tname | '(' tfplist ')' +tfplist: tfpdef (',' tfpdef)* [','] +varargslist: ((vfpdef ['=' test] ',')* + ('*' [vname] (',' vname ['=' test])* [',' '**' vname] | '**' vname) + | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) +vname: NAME +vfpdef: vname | '(' vfplist ')' +vfplist: vfpdef (',' vfpdef)* [','] + +stmt: simple_stmt | compound_stmt +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | exec_stmt | assert_stmt) +expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal assignments, additional restrictions enforced by the interpreter +print_stmt: 'print' ( [ test (',' test)* [','] ] | + '>>' test [ (',' test)+ [','] ] ) +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +import_from: ('from' ('.'* dotted_name | '.'+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' 
NAME)* +global_stmt: ('global' | 'nonlocal') NAME (',' NAME)* +exec_stmt: 'exec' expr ['in' test [',' test]] +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated +if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] +while_stmt: 'while' test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +with_var: 'as' expr +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test [(',' | 'as') test]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +# Backward compatibility cruft to support: +# [ x for x in lambda: True, lambda: False if x() ] +# even while also allowing: +# lambda x: 5 if x else 2 +# (But not a mix of the two) +testlist_safe: old_test [(',' old_test)+ [',']] +old_test: or_test | old_lambdef +old_lambdef: 'lambda' [varargslist] ':' old_test + +test: or_test ['if' or_test 'else' test] | lambdef +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom trailer* ['**' factor] +atom: ('(' [yield_expr|testlist_gexp] ')' | + '[' [listmaker] ']' | + '{' [dictsetmaker] '}' | + '`' testlist1 '`' | + NAME | NUMBER | STRING+ | '.' '.' '.') +listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) +testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) +lambdef: 'lambda' [varargslist] ':' test +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' 
NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) | + (test (comp_for | (',' test)* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: (argument ',')* (argument [','] + |'*' test (',' argument)* [',' '**' test] + |'**' test) +argument: test [comp_for] | test '=' test # Really [keyword '='] test + +comp_iter: comp_for | comp_if +comp_for: 'for' exprlist 'in' testlist_safe [comp_iter] +comp_if: 'if' old_test [comp_iter] + +testlist1: test (',' test)* + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [testlist] diff --git a/lib3/2to3/lib2to3/Grammar3.3.2.final.0.pickle b/lib3/2to3/lib2to3/Grammar3.3.2.final.0.pickle new file mode 100644 index 0000000000000000000000000000000000000000..75b6ed97f37c891e7bf59e87bc4d9b00f8dc2220 GIT binary patch [stripped] diff --git a/lib3/2to3/lib2to3/Grammar3.4.0.alpha.1.pickle b/lib3/2to3/lib2to3/Grammar3.4.0.alpha.1.pickle new file mode 100644 index 0000000000000000000000000000000000000000..47538c4926776fb9cfca46f5b012954416fe1ccd GIT binary patch [stripped] diff --git a/lib3/2to3/lib2to3/PatternGrammar.txt b/lib3/2to3/lib2to3/PatternGrammar.txt new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/PatternGrammar.txt @@ -0,0 +1,28 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# A grammar to describe tree matching patterns. +# Not shown here: +# - 'TOKEN' stands for any token (leaf node) +# - 'any' stands for any node (leaf or interior) +# With 'any' we can still specify the sub-structure. + +# The start symbol is 'Matcher'. + +Matcher: Alternatives ENDMARKER + +Alternatives: Alternative ('|' Alternative)* + +Alternative: (Unit | NegatedUnit)+ + +Unit: [NAME '='] ( STRING [Repeater] + | NAME [Details] [Repeater] + | '(' Alternatives ')' [Repeater] + | '[' Alternatives ']' + ) + +NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')') + +Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}' + +Details: '<' Alternatives '>' diff --git a/lib3/2to3/lib2to3/PatternGrammar3.3.2.final.0.pickle b/lib3/2to3/lib2to3/PatternGrammar3.3.2.final.0.pickle new file mode 100644 index 0000000000000000000000000000000000000000..65c9c657879b3059c16e7db1168a1a823b529f65 GIT binary patch [stripped] diff --git a/lib3/2to3/lib2to3/PatternGrammar3.4.0.alpha.1.pickle b/lib3/2to3/lib2to3/PatternGrammar3.4.0.alpha.1.pickle new file mode 100644 index 0000000000000000000000000000000000000000..92ba51d6585b371200989ad68c00eb74858f43e1 GIT binary patch [stripped] diff --git a/lib3/2to3/lib2to3/__init__.py b/lib3/2to3/lib2to3/__init__.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/__init__.py @@ -0,0 +1,1 @@ +#empty diff --git a/lib3/2to3/lib2to3/btm_matcher.py b/lib3/2to3/lib2to3/btm_matcher.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/btm_matcher.py @@ -0,0 +1,168 @@ +"""A bottom-up tree matching algorithm implementation meant to speed +up 2to3's matching process. After the tree patterns are reduced to +their rarest linear path, a linear Aho-Corasick automaton is +created. The linear automaton traverses the linear paths from the +leaves to the root of the AST and returns a set of nodes for further +matching. 
This reduces significantly the number of candidate nodes.""" + +__author__ = "George Boutsioukis " + +import logging +import itertools +from collections import defaultdict + +from . import pytree +from .btm_utils import reduce_tree + +class BMNode(object): + """Class for a node of the Aho-Corasick automaton used in matching""" + count = itertools.count() + def __init__(self): + self.transition_table = {} + self.fixers = [] + self.id = next(BMNode.count) + self.content = '' + +class BottomMatcher(object): + """The main matcher class. After instantiating the patterns should + be added using the add_fixer method""" + + def __init__(self): + self.match = set() + self.root = BMNode() + self.nodes = [self.root] + self.fixers = [] + self.logger = logging.getLogger("RefactoringTool") + + def add_fixer(self, fixer): + """Reduces a fixer's pattern tree to a linear path and adds it + to the matcher(a common Aho-Corasick automaton). The fixer is + appended on the matching states and called when they are + reached""" + self.fixers.append(fixer) + tree = reduce_tree(fixer.pattern_tree) + linear = tree.get_linear_subpattern() + match_nodes = self.add(linear, start=self.root) + for match_node in match_nodes: + match_node.fixers.append(fixer) + + def add(self, pattern, start): + "Recursively adds a linear pattern to the AC automaton" + #print("adding pattern", pattern, "to", start) + if not pattern: + #print("empty pattern") + return [start] + if isinstance(pattern[0], tuple): + #alternatives + #print("alternatives") + match_nodes = [] + for alternative in pattern[0]: + #add all alternatives, and add the rest of the pattern + #to each end node + end_nodes = self.add(alternative, start=start) + for end in end_nodes: + match_nodes.extend(self.add(pattern[1:], end)) + return match_nodes + else: + #single token + #not last + if pattern[0] not in start.transition_table: + #transition did not exist, create new + next_node = BMNode() + start.transition_table[pattern[0]] = next_node + else: + #transition exists already, follow + next_node = start.transition_table[pattern[0]] + + if pattern[1:]: + end_nodes = self.add(pattern[1:], start=next_node) + else: + end_nodes = [next_node] + return end_nodes + + def run(self, leaves): + """The main interface with the bottom matcher. The tree is + traversed from the bottom using the constructed + automaton. Nodes are only checked once as the tree is + retraversed. When the automaton fails, we give it one more + shot(in case the above tree matches as a whole with the + rejected leaf), then we break for the next leaf. 
There is the + special case of multiple arguments(see code comments) where we + recheck the nodes + + Args: + The leaves of the AST tree to be matched + + Returns: + A dictionary of node matches with fixers as the keys + """ + current_ac_node = self.root + results = defaultdict(list) + for leaf in leaves: + current_ast_node = leaf + while current_ast_node: + current_ast_node.was_checked = True + for child in current_ast_node.children: + # multiple statements, recheck + if isinstance(child, pytree.Leaf) and child.value == ";": + current_ast_node.was_checked = False + break + if current_ast_node.type == 1: + #name + node_token = current_ast_node.value + else: + node_token = current_ast_node.type + + if node_token in current_ac_node.transition_table: + #token matches + current_ac_node = current_ac_node.transition_table[node_token] + for fixer in current_ac_node.fixers: + if not fixer in results: + results[fixer] = [] + results[fixer].append(current_ast_node) + + else: + #matching failed, reset automaton + current_ac_node = self.root + if (current_ast_node.parent is not None + and current_ast_node.parent.was_checked): + #the rest of the tree upwards has been checked, next leaf + break + + #recheck the rejected node once from the root + if node_token in current_ac_node.transition_table: + #token matches + current_ac_node = current_ac_node.transition_table[node_token] + for fixer in current_ac_node.fixers: + if not fixer in list(results.keys()): + results[fixer] = [] + results[fixer].append(current_ast_node) + + current_ast_node = current_ast_node.parent + return results + + def print_ac(self): + "Prints a graphviz diagram of the BM automaton(for debugging)" + print("digraph g{") + def print_node(node): + for subnode_key in list(node.transition_table.keys()): + subnode = node.transition_table[subnode_key] + print(("%d -> %d [label=%s] //%s" % + (node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers)))) + if subnode_key == 1: + print((subnode.content)) + print_node(subnode) + print_node(self.root) + print("}") + +# taken from pytree.py for debugging; only used by print_ac +_type_reprs = {} +def type_repr(type_num): + global _type_reprs + if not _type_reprs: + from .pygram import python_symbols + # printing tokens is possible but not as useful + # from .pgen2 import token // token.__dict__.items(): + for name, val in list(python_symbols.__dict__.items()): + if type(val) == int: _type_reprs[val] = name + return _type_reprs.setdefault(type_num, type_num) diff --git a/lib3/2to3/lib2to3/btm_utils.py b/lib3/2to3/lib2to3/btm_utils.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/btm_utils.py @@ -0,0 +1,283 @@ +"Utility functions used by the btm_matcher module" + +from . import pytree +from .pgen2 import grammar, token +from .pygram import pattern_symbols, python_symbols + +syms = pattern_symbols +pysyms = python_symbols +tokens = grammar.opmap +token_labels = token + +TYPE_ANY = -1 +TYPE_ALTERNATIVES = -2 +TYPE_GROUP = -3 + +class MinNode(object): + """This class serves as an intermediate representation of the + pattern tree during the conversion to sets of leaf-to-root + subpatterns""" + + def __init__(self, type=None, name=None): + self.type = type + self.name = name + self.children = [] + self.leaf = False + self.parent = None + self.alternatives = [] + self.group = [] + + def __repr__(self): + return str(self.type) + ' ' + str(self.name) + + def leaf_to_root(self): + """Internal method. Returns a characteristic path of the + pattern tree. 
This method must be run for all leaves until the + linear subpatterns are merged into a single""" + node = self + subp = [] + while node: + if node.type == TYPE_ALTERNATIVES: + node.alternatives.append(subp) + if len(node.alternatives) == len(node.children): + #last alternative + subp = [tuple(node.alternatives)] + node.alternatives = [] + node = node.parent + continue + else: + node = node.parent + subp = None + break + + if node.type == TYPE_GROUP: + node.group.append(subp) + #probably should check the number of leaves + if len(node.group) == len(node.children): + subp = get_characteristic_subpattern(node.group) + node.group = [] + node = node.parent + continue + else: + node = node.parent + subp = None + break + + if node.type == token_labels.NAME and node.name: + #in case of type=name, use the name instead + subp.append(node.name) + else: + subp.append(node.type) + + node = node.parent + return subp + + def get_linear_subpattern(self): + """Drives the leaf_to_root method. The reason that + leaf_to_root must be run multiple times is because we need to + reject 'group' matches; for example the alternative form + (a | b c) creates a group [b c] that needs to be matched. Since + matching multiple linear patterns overcomes the automaton's + capabilities, leaf_to_root merges each group into a single + choice based on 'characteristic'ity, + + i.e. (a|b c) -> (a|b) if b more characteristic than c + + Returns: The most 'characteristic'(as defined by + get_characteristic_subpattern) path for the compiled pattern + tree. + """ + + for l in self.leaves(): + subp = l.leaf_to_root() + if subp: + return subp + + def leaves(self): + "Generator that returns the leaves of the tree" + for child in self.children: + for x in child.leaves(): + yield x + if not self.children: + yield self + +def reduce_tree(node, parent=None): + """ + Internal function. Reduces a compiled pattern tree to an + intermediate representation suitable for feeding the + automaton. This also trims off any optional pattern elements(like + [a], a*). 
+ """ + + new_node = None + #switch on the node type + if node.type == syms.Matcher: + #skip + node = node.children[0] + + if node.type == syms.Alternatives : + #2 cases + if len(node.children) <= 2: + #just a single 'Alternative', skip this node + new_node = reduce_tree(node.children[0], parent) + else: + #real alternatives + new_node = MinNode(type=TYPE_ALTERNATIVES) + #skip odd children('|' tokens) + for child in node.children: + if node.children.index(child)%2: + continue + reduced = reduce_tree(child, new_node) + if reduced is not None: + new_node.children.append(reduced) + elif node.type == syms.Alternative: + if len(node.children) > 1: + + new_node = MinNode(type=TYPE_GROUP) + for child in node.children: + reduced = reduce_tree(child, new_node) + if reduced: + new_node.children.append(reduced) + if not new_node.children: + # delete the group if all of the children were reduced to None + new_node = None + + else: + new_node = reduce_tree(node.children[0], parent) + + elif node.type == syms.Unit: + if (isinstance(node.children[0], pytree.Leaf) and + node.children[0].value == '('): + #skip parentheses + return reduce_tree(node.children[1], parent) + if ((isinstance(node.children[0], pytree.Leaf) and + node.children[0].value == '[') + or + (len(node.children)>1 and + hasattr(node.children[1], "value") and + node.children[1].value == '[')): + #skip whole unit if its optional + return None + + leaf = True + details_node = None + alternatives_node = None + has_repeater = False + repeater_node = None + has_variable_name = False + + for child in node.children: + if child.type == syms.Details: + leaf = False + details_node = child + elif child.type == syms.Repeater: + has_repeater = True + repeater_node = child + elif child.type == syms.Alternatives: + alternatives_node = child + if hasattr(child, 'value') and child.value == '=': # variable name + has_variable_name = True + + #skip variable name + if has_variable_name: + #skip variable name, '=' + name_leaf = node.children[2] + if hasattr(name_leaf, 'value') and name_leaf.value == '(': + # skip parenthesis + name_leaf = node.children[3] + else: + name_leaf = node.children[0] + + #set node type + if name_leaf.type == token_labels.NAME: + #(python) non-name or wildcard + if name_leaf.value == 'any': + new_node = MinNode(type=TYPE_ANY) + else: + if hasattr(token_labels, name_leaf.value): + new_node = MinNode(type=getattr(token_labels, name_leaf.value)) + else: + new_node = MinNode(type=getattr(pysyms, name_leaf.value)) + + elif name_leaf.type == token_labels.STRING: + #(python) name or character; remove the apostrophes from + #the string value + name = name_leaf.value.strip("'") + if name in tokens: + new_node = MinNode(type=tokens[name]) + else: + new_node = MinNode(type=token_labels.NAME, name=name) + elif name_leaf.type == syms.Alternatives: + new_node = reduce_tree(alternatives_node, parent) + + #handle repeaters + if has_repeater: + if repeater_node.children[0].value == '*': + #reduce to None + new_node = None + elif repeater_node.children[0].value == '+': + #reduce to a single occurence i.e. 
do nothing + pass + else: + #TODO: handle {min, max} repeaters + raise NotImplementedError + pass + + #add children + if details_node and new_node is not None: + for child in details_node.children[1:-1]: + #skip '<', '>' markers + reduced = reduce_tree(child, new_node) + if reduced is not None: + new_node.children.append(reduced) + if new_node: + new_node.parent = parent + return new_node + + +def get_characteristic_subpattern(subpatterns): + """Picks the most characteristic from a list of linear patterns + Current order used is: + names > common_names > common_chars + """ + if not isinstance(subpatterns, list): + return subpatterns + if len(subpatterns)==1: + return subpatterns[0] + + # first pick out the ones containing variable names + subpatterns_with_names = [] + subpatterns_with_common_names = [] + common_names = ['in', 'for', 'if' , 'not', 'None'] + subpatterns_with_common_chars = [] + common_chars = "[]().,:" + for subpattern in subpatterns: + if any(rec_test(subpattern, lambda x: type(x) is str)): + if any(rec_test(subpattern, + lambda x: isinstance(x, str) and x in common_chars)): + subpatterns_with_common_chars.append(subpattern) + elif any(rec_test(subpattern, + lambda x: isinstance(x, str) and x in common_names)): + subpatterns_with_common_names.append(subpattern) + + else: + subpatterns_with_names.append(subpattern) + + if subpatterns_with_names: + subpatterns = subpatterns_with_names + elif subpatterns_with_common_names: + subpatterns = subpatterns_with_common_names + elif subpatterns_with_common_chars: + subpatterns = subpatterns_with_common_chars + # of the remaining subpatterns pick out the longest one + return max(subpatterns, key=len) + +def rec_test(sequence, test_func): + """Tests test_func on all items of sequence and items of included + sub-iterables""" + for x in sequence: + if isinstance(x, (list, tuple)): + for y in rec_test(x, test_func): + yield y + else: + yield test_func(x) diff --git a/lib3/2to3/lib2to3/fixer_base.py b/lib3/2to3/lib2to3/fixer_base.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixer_base.py @@ -0,0 +1,189 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Base class for fixers (optional, but recommended).""" + +# Python imports +import logging +import itertools + +# Local imports +from .patcomp import PatternCompiler +from . import pygram +from .fixer_util import does_tree_import + +class BaseFix(object): + + """Optional base class for fixers. + + The subclass name must be FixFooBar where FooBar is the result of + removing underscores and capitalizing the words of the fix name. + For example, the class name for a fixer named 'has_key' should be + FixHasKey. + """ + + PATTERN = None # Most subclasses should override with a string literal + pattern = None # Compiled pattern, set by compile_pattern() + pattern_tree = None # Tree representation of the pattern + options = None # Options object passed to initializer + filename = None # The filename (set by set_filename) + logger = None # A logger (set by set_filename) + numbers = itertools.count(1) # For new_name() + used_names = set() # A set of all used NAMEs + order = "post" # Does the fixer prefer pre- or post-order traversal + explicit = False # Is this ignored by refactor.py -f all? + run_order = 5 # Fixers will be sorted by run order before execution + # Lower numbers will be run first. 
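The BaseFix conventions above (the FixFooBar naming rule, PATTERN, BM_compatible and run_order) are easiest to see in a tiny concrete fixer. The following sketch is illustrative only and not part of the patch; the fixer name and pattern are invented, and it simply mirrors the shape of the simplest fixers added later in this changeset (compare fix_basestring).

    # Illustrative sketch, not part of the patch: a minimal fixer in the
    # style required by BaseFix.  For a fix named 'unicode_name' the class
    # must be called FixUnicodeName.
    from lib2to3 import fixer_base
    from lib2to3.fixer_util import Name

    class FixUnicodeName(fixer_base.BaseFix):
        BM_compatible = True      # opt in to the bottom matcher
        PATTERN = "'unicode'"     # match the bare NAME 'unicode'

        def transform(self, node, results):
            # Return a replacement leaf, keeping the original prefix
            # (the whitespace and comments attached to the node).
            return Name("str", prefix=node.prefix)

In normal use such a module lives under lib2to3/fixes/ and is discovered by name, for example via refactor.get_fixers_from_package("lib2to3.fixes").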
+ _accept_type = None # [Advanced and not public] This tells RefactoringTool + # which node type to accept when there's not a pattern. + + keep_line_order = False # For the bottom matcher: match with the + # original line order + BM_compatible = False # Compatibility with the bottom matching + # module; every fixer should set this + # manually + + # Shortcut for access to Python grammar symbols + syms = pygram.python_symbols + + def __init__(self, options, log): + """Initializer. Subclass may override. + + Args: + options: an dict containing the options passed to RefactoringTool + that could be used to customize the fixer through the command line. + log: a list to append warnings and other messages to. + """ + self.options = options + self.log = log + self.compile_pattern() + + def compile_pattern(self): + """Compiles self.PATTERN into self.pattern. + + Subclass may override if it doesn't want to use + self.{pattern,PATTERN} in .match(). + """ + if self.PATTERN is not None: + PC = PatternCompiler() + self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN, + with_tree=True) + + def set_filename(self, filename): + """Set the filename, and a logger derived from it. + + The main refactoring tool should call this. + """ + self.filename = filename + self.logger = logging.getLogger(filename) + + def match(self, node): + """Returns match for a given parse tree node. + + Should return a true or false object (not necessarily a bool). + It may return a non-empty dict of matching sub-nodes as + returned by a matching pattern. + + Subclass may override. + """ + results = {"node": node} + return self.pattern.match(node, results) and results + + def transform(self, node, results): + """Returns the transformation for a given parse tree node. + + Args: + node: the root of the parse tree that matched the fixer. + results: a dict mapping symbolic names to part of the match. + + Returns: + None, or a node that is a modified copy of the + argument node. The node argument may also be modified in-place to + effect the same change. + + Subclass *must* override. + """ + raise NotImplementedError() + + def new_name(self, template="xxx_todo_changeme"): + """Return a string suitable for use as an identifier + + The new name is guaranteed not to conflict with other identifiers. + """ + name = template + while name in self.used_names: + name = template + str(next(self.numbers)) + self.used_names.add(name) + return name + + def log_message(self, message): + if self.first_log: + self.first_log = False + self.log.append("### In file %s ###" % self.filename) + self.log.append(message) + + def cannot_convert(self, node, reason=None): + """Warn the user that a given chunk of code is not valid Python 3, + but that it cannot be converted automatically. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + lineno = node.get_lineno() + for_output = node.clone() + for_output.prefix = "" + msg = "Line %d: could not convert: %s" + self.log_message(msg % (lineno, for_output)) + if reason: + self.log_message(reason) + + def warning(self, node, reason): + """Used for warning the user about possible uncertainty in the + translation. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + lineno = node.get_lineno() + self.log_message("Line %d: %s" % (lineno, reason)) + + def start_tree(self, tree, filename): + """Some fixers need to maintain tree-wide state. 
+ This method is called once, at the start of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + self.used_names = tree.used_names + self.set_filename(filename) + self.numbers = itertools.count(1) + self.first_log = True + + def finish_tree(self, tree, filename): + """Some fixers need to maintain tree-wide state. + This method is called once, at the conclusion of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + pass + + +class ConditionalFix(BaseFix): + """ Base class for fixers which not execute if an import is found. """ + + # This is the name of the import which, if found, will cause the test to be skipped + skip_on = None + + def start_tree(self, *args): + super(ConditionalFix, self).start_tree(*args) + self._should_skip = None + + def should_skip(self, node): + if self._should_skip is not None: + return self._should_skip + pkg = self.skip_on.split(".") + name = pkg[-1] + pkg = ".".join(pkg[:-1]) + self._should_skip = does_tree_import(pkg, name, node) + return self._should_skip diff --git a/lib3/2to3/lib2to3/fixer_util.py b/lib3/2to3/lib2to3/fixer_util.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixer_util.py @@ -0,0 +1,432 @@ +"""Utility functions, node construction macros, etc.""" +# Author: Collin Winter + +from itertools import islice + +# Local imports +from .pgen2 import token +from .pytree import Leaf, Node +from .pygram import python_symbols as syms +from . import patcomp + + +########################################################### +### Common node-construction "macros" +########################################################### + +def KeywordArg(keyword, value): + return Node(syms.argument, + [keyword, Leaf(token.EQUAL, "="), value]) + +def LParen(): + return Leaf(token.LPAR, "(") + +def RParen(): + return Leaf(token.RPAR, ")") + +def Assign(target, source): + """Build an assignment statement""" + if not isinstance(target, list): + target = [target] + if not isinstance(source, list): + source.prefix = " " + source = [source] + + return Node(syms.atom, + target + [Leaf(token.EQUAL, "=", prefix=" ")] + source) + +def Name(name, prefix=None): + """Return a NAME leaf""" + return Leaf(token.NAME, name, prefix=prefix) + +def Attr(obj, attr): + """A node tuple for obj.attr""" + return [obj, Node(syms.trailer, [Dot(), attr])] + +def Comma(): + """A comma leaf""" + return Leaf(token.COMMA, ",") + +def Dot(): + """A period (.) 
leaf""" + return Leaf(token.DOT, ".") + +def ArgList(args, lparen=LParen(), rparen=RParen()): + """A parenthesised argument list, used by Call()""" + node = Node(syms.trailer, [lparen.clone(), rparen.clone()]) + if args: + node.insert_child(1, Node(syms.arglist, args)) + return node + +def Call(func_name, args=None, prefix=None): + """A function call""" + node = Node(syms.power, [func_name, ArgList(args)]) + if prefix is not None: + node.prefix = prefix + return node + +def Newline(): + """A newline literal""" + return Leaf(token.NEWLINE, "\n") + +def BlankLine(): + """A blank line""" + return Leaf(token.NEWLINE, "") + +def Number(n, prefix=None): + return Leaf(token.NUMBER, n, prefix=prefix) + +def Subscript(index_node): + """A numeric or string subscript""" + return Node(syms.trailer, [Leaf(token.LBRACE, "["), + index_node, + Leaf(token.RBRACE, "]")]) + +def String(string, prefix=None): + """A string leaf""" + return Leaf(token.STRING, string, prefix=prefix) + +def ListComp(xp, fp, it, test=None): + """A list comprehension of the form [xp for fp in it if test]. + + If test is None, the "if test" part is omitted. + """ + xp.prefix = "" + fp.prefix = " " + it.prefix = " " + for_leaf = Leaf(token.NAME, "for") + for_leaf.prefix = " " + in_leaf = Leaf(token.NAME, "in") + in_leaf.prefix = " " + inner_args = [for_leaf, fp, in_leaf, it] + if test: + test.prefix = " " + if_leaf = Leaf(token.NAME, "if") + if_leaf.prefix = " " + inner_args.append(Node(syms.comp_if, [if_leaf, test])) + inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) + return Node(syms.atom, + [Leaf(token.LBRACE, "["), + inner, + Leaf(token.RBRACE, "]")]) + +def FromImport(package_name, name_leafs): + """ Return an import statement in the form: + from package import name_leafs""" + # XXX: May not handle dotted imports properly (eg, package_name='foo.bar') + #assert package_name == '.' or '.' not in package_name, "FromImport has "\ + # "not been tested with dotted package names -- use at your own "\ + # "peril!" 
+ + for leaf in name_leafs: + # Pull the leaves out of their old tree + leaf.remove() + + children = [Leaf(token.NAME, "from"), + Leaf(token.NAME, package_name, prefix=" "), + Leaf(token.NAME, "import", prefix=" "), + Node(syms.import_as_names, name_leafs)] + imp = Node(syms.import_from, children) + return imp + + +########################################################### +### Determine whether a node represents a given literal +########################################################### + +def is_tuple(node): + """Does the node represent a tuple literal?""" + if isinstance(node, Node) and node.children == [LParen(), RParen()]: + return True + return (isinstance(node, Node) + and len(node.children) == 3 + and isinstance(node.children[0], Leaf) + and isinstance(node.children[1], Node) + and isinstance(node.children[2], Leaf) + and node.children[0].value == "(" + and node.children[2].value == ")") + +def is_list(node): + """Does the node represent a list literal?""" + return (isinstance(node, Node) + and len(node.children) > 1 + and isinstance(node.children[0], Leaf) + and isinstance(node.children[-1], Leaf) + and node.children[0].value == "[" + and node.children[-1].value == "]") + + +########################################################### +### Misc +########################################################### + +def parenthesize(node): + return Node(syms.atom, [LParen(), node, RParen()]) + + +consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum", + "min", "max"]) + +def attr_chain(obj, attr): + """Follow an attribute chain. + + If you have a chain of objects where a.foo -> b, b.foo-> c, etc, + use this to iterate over all objects in the chain. Iteration is + terminated by getattr(x, attr) is None. + + Args: + obj: the starting object + attr: the name of the chaining attribute + + Yields: + Each successive object in the chain. + """ + next = getattr(obj, attr) + while next: + yield next + next = getattr(next, attr) + +p0 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ +p1 = """ +power< + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | + 'any' | 'all' | (any* trailer< '.' 'join' >) ) + trailer< '(' node=any ')' > + any* +> +""" +p2 = """ +power< + 'sorted' + trailer< '(' arglist ')' > + any* +> +""" +pats_built = False +def in_special_context(node): + """ Returns true if node is in an environment where all that is required + of it is being itterable (ie, it doesn't matter if it returns a list + or an itterator). + See test_map_nochange in test_fixers.py for some examples and tests. + """ + global p0, p1, p2, pats_built + if not pats_built: + p1 = patcomp.compile_pattern(p1) + p0 = patcomp.compile_pattern(p0) + p2 = patcomp.compile_pattern(p2) + pats_built = True + patterns = [p0, p1, p2] + for pattern, parent in zip(patterns, attr_chain(node, "parent")): + results = {} + if pattern.match(parent, results) and results["node"] is node: + return True + return False + +def is_probably_builtin(node): + """ + Check that something isn't an attribute or function name etc. + """ + prev = node.prev_sibling + if prev is not None and prev.type == token.DOT: + # Attribute lookup. + return False + parent = node.parent + if parent.type in (syms.funcdef, syms.classdef): + return False + if parent.type == syms.expr_stmt and parent.children[0] is node: + # Assignment. 
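The node-construction macros above (Name, Comma, Call, Attr and friends) are how transform() methods assemble their replacement subtrees, and str() on any pytree node renders it back to source text. A small self-contained illustration, not part of the patch:

    # Illustrative only: build the call 'isinstance(x, int)' from leaves,
    # the same way fix_idioms later in this changeset assembles its
    # replacement.
    from lib2to3.fixer_util import Call, Comma, Name

    call = Call(Name("isinstance"), [Name("x"), Comma(), Name("int", prefix=" ")])
    print(str(call))    # prints: isinstance(x, int)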
+ return False + if parent.type == syms.parameters or \ + (parent.type == syms.typedargslist and ( + (prev is not None and prev.type == token.COMMA) or + parent.children[0] is node + )): + # The name of an argument. + return False + return True + +def find_indentation(node): + """Find the indentation of *node*.""" + while node is not None: + if node.type == syms.suite and len(node.children) > 2: + indent = node.children[1] + if indent.type == token.INDENT: + return indent.value + node = node.parent + return "" + +########################################################### +### The following functions are to find bindings in a suite +########################################################### + +def make_suite(node): + if node.type == syms.suite: + return node + node = node.clone() + parent, node.parent = node.parent, None + suite = Node(syms.suite, [node]) + suite.parent = parent + return suite + +def find_root(node): + """Find the top level namespace.""" + # Scamper up to the top level namespace + while node.type != syms.file_input: + assert node.parent, "Tree is insane! root found before "\ + "file_input node was found." + node = node.parent + return node + +def does_tree_import(package, name, node): + """ Returns true if name is imported from package at the + top level of the tree which node belongs to. + To cover the case of an import like 'import foo', use + None for the package and 'foo' for the name. """ + binding = find_binding(name, find_root(node), package) + return bool(binding) + +def is_import(node): + """Returns true if the node is an import statement.""" + return node.type in (syms.import_name, syms.import_from) + +def touch_import(package, name, node): + """ Works like `does_tree_import` but adds an import statement + if it was not imported. """ + def is_import_stmt(node): + return (node.type == syms.simple_stmt and node.children and + is_import(node.children[0])) + + root = find_root(node) + + if does_tree_import(package, name, root): + return + + # figure out where to insert the new import. First try to find + # the first import and then skip to the last one. + insert_pos = offset = 0 + for idx, node in enumerate(root.children): + if not is_import_stmt(node): + continue + for offset, node2 in enumerate(root.children[idx:]): + if not is_import_stmt(node2): + break + insert_pos = idx + offset + break + + # if there are no imports where we can insert, find the docstring. + # if that also fails, we stick to the beginning of the file + if insert_pos == 0: + for idx, node in enumerate(root.children): + if (node.type == syms.simple_stmt and node.children and + node.children[0].type == token.STRING): + insert_pos = idx + 1 + break + + if package is None: + import_ = Node(syms.import_name, [ + Leaf(token.NAME, "import"), + Leaf(token.NAME, name, prefix=" ") + ]) + else: + import_ = FromImport(package, [Leaf(token.NAME, name, prefix=" ")]) + + children = [import_, Newline()] + root.insert_child(insert_pos, Node(syms.simple_stmt, children)) + + +_def_syms = set([syms.classdef, syms.funcdef]) +def find_binding(name, node, package=None): + """ Returns the node which binds variable name, otherwise None. + If optional argument package is supplied, only imports will + be returned. 
+ See test cases for examples.""" + for child in node.children: + ret = None + if child.type == syms.for_stmt: + if _find(name, child.children[1]): + return child + n = find_binding(name, make_suite(child.children[-1]), package) + if n: ret = n + elif child.type in (syms.if_stmt, syms.while_stmt): + n = find_binding(name, make_suite(child.children[-1]), package) + if n: ret = n + elif child.type == syms.try_stmt: + n = find_binding(name, make_suite(child.children[2]), package) + if n: + ret = n + else: + for i, kid in enumerate(child.children[3:]): + if kid.type == token.COLON and kid.value == ":": + # i+3 is the colon, i+4 is the suite + n = find_binding(name, make_suite(child.children[i+4]), package) + if n: ret = n + elif child.type in _def_syms and child.children[1].value == name: + ret = child + elif _is_import_binding(child, name, package): + ret = child + elif child.type == syms.simple_stmt: + ret = find_binding(name, child, package) + elif child.type == syms.expr_stmt: + if _find(name, child.children[0]): + ret = child + + if ret: + if not package: + return ret + if is_import(ret): + return ret + return None + +_block_syms = set([syms.funcdef, syms.classdef, syms.trailer]) +def _find(name, node): + nodes = [node] + while nodes: + node = nodes.pop() + if node.type > 256 and node.type not in _block_syms: + nodes.extend(node.children) + elif node.type == token.NAME and node.value == name: + return node + return None + +def _is_import_binding(node, name, package=None): + """ Will reuturn node if node will import name, or node + will import * from package. None is returned otherwise. + See test cases for examples. """ + + if node.type == syms.import_name and not package: + imp = node.children[1] + if imp.type == syms.dotted_as_names: + for child in imp.children: + if child.type == syms.dotted_as_name: + if child.children[2].value == name: + return node + elif child.type == token.NAME and child.value == name: + return node + elif imp.type == syms.dotted_as_name: + last = imp.children[-1] + if last.type == token.NAME and last.value == name: + return node + elif imp.type == token.NAME and imp.value == name: + return node + elif node.type == syms.import_from: + # unicode(...) is used to make life easier here, because + # from a.b import parses to ['import', ['a', '.', 'b'], ...] + if package and str(node.children[1]).strip() != package: + return None + n = node.children[3] + if package and _find("as", n): + # See test_from_import_as for explanation + return None + elif n.type == syms.import_as_names and _find(name, n): + return node + elif n.type == syms.import_as_name: + child = n.children[2] + if child.type == token.NAME and child.value == name: + return node + elif n.type == token.NAME and n.value == name: + return node + elif package and n.type == token.STAR: + return node + return None diff --git a/lib3/2to3/lib2to3/fixes/__init__.py b/lib3/2to3/lib2to3/fixes/__init__.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/__init__.py @@ -0,0 +1,1 @@ +# Dummy file to make this directory a package. diff --git a/lib3/2to3/lib2to3/fixes/fix_apply.py b/lib3/2to3/lib2to3/fixes/fix_apply.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_apply.py @@ -0,0 +1,59 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for apply(). + +This converts apply(func, v, k) into (func)(*v, **k).""" + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. 
import fixer_base +from ..fixer_util import Call, Comma, parenthesize + +class FixApply(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'apply' + trailer< + '(' + arglist< + (not argument + ')' + > + > + """ + + def transform(self, node, results): + syms = self.syms + assert results + func = results["func"] + args = results["args"] + kwds = results.get("kwds") + prefix = node.prefix + func = func.clone() + if (func.type not in (token.NAME, syms.atom) and + (func.type != syms.power or + func.children[-2].type == token.DOUBLESTAR)): + # Need to parenthesize + func = parenthesize(func) + func.prefix = "" + args = args.clone() + args.prefix = "" + if kwds is not None: + kwds = kwds.clone() + kwds.prefix = "" + l_newargs = [pytree.Leaf(token.STAR, "*"), args] + if kwds is not None: + l_newargs.extend([Comma(), + pytree.Leaf(token.DOUBLESTAR, "**"), + kwds]) + l_newargs[-2].prefix = " " # that's the ** token + # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t) + # can be translated into f(x, y, *t) instead of f(*(x, y) + t) + #new = pytree.Node(syms.power, (func, ArgList(l_newargs))) + return Call(func, l_newargs, prefix=prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_basestring.py b/lib3/2to3/lib2to3/fixes/fix_basestring.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_basestring.py @@ -0,0 +1,14 @@ +"""Fixer for basestring -> str.""" +# Author: Christian Heimes + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixBasestring(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = "'basestring'" + + def transform(self, node, results): + return Name("str", prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_buffer.py b/lib3/2to3/lib2to3/fixes/fix_buffer.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_buffer.py @@ -0,0 +1,22 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes buffer(...) into memoryview(...).""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixBuffer(fixer_base.BaseFix): + BM_compatible = True + + explicit = True # The user must ask for this fixer + + PATTERN = """ + power< name='buffer' trailer< '(' [any] ')' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name("memoryview", prefix=name.prefix)) diff --git a/lib3/2to3/lib2to3/fixes/fix_callable.py b/lib3/2to3/lib2to3/fixes/fix_callable.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_callable.py @@ -0,0 +1,37 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for callable(). + +This converts callable(obj) into isinstance(obj, collections.Callable), adding a +collections import if needed.""" + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import Call, Name, String, Attr, touch_import + +class FixCallable(fixer_base.BaseFix): + BM_compatible = True + + order = "pre" + + # Ignore callable(*args) or use of keywords. + # Either could be a hint that the builtin callable() is not being used. 
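Individual fixers such as fix_apply above are not normally instantiated by hand; they are driven by module name through lib2to3.refactor. A hedged usage sketch, with the snippet and file label invented for illustration:

    # Illustrative only: run the apply() fixer over a one-line snippet.
    # refactor_string() expects the source to end with a newline.
    from lib2to3 import refactor

    rt = refactor.RefactoringTool(["lib2to3.fixes.fix_apply"])
    tree = rt.refactor_string("apply(f, args, kwds)\n", "<example>")
    print(str(tree))    # apply(f, args, kwds)  ->  f(*args, **kwds)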
+ PATTERN = """ + power< 'callable' + trailer< lpar='(' + ( not(arglist | argument) any ','> ) + rpar=')' > + after=any* + > + """ + + def transform(self, node, results): + func = results['func'] + + touch_import(None, 'collections', node=node) + + args = [func.clone(), String(', ')] + args.extend(Attr(Name('collections'), Name('Callable'))) + return Call(Name('isinstance'), args, prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_dict.py b/lib3/2to3/lib2to3/fixes/fix_dict.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_dict.py @@ -0,0 +1,107 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for dict methods. + +d.keys() -> list(d.keys()) +d.items() -> list(d.items()) +d.values() -> list(d.values()) + +d.iterkeys() -> iter(d.keys()) +d.iteritems() -> iter(d.items()) +d.itervalues() -> iter(d.values()) + +d.viewkeys() -> d.keys() +d.viewitems() -> d.items() +d.viewvalues() -> d.values() + +Except in certain very specific contexts: the iter() can be dropped +when the context is list(), sorted(), iter() or for...in; the list() +can be dropped when the context is list() or sorted() (but not iter() +or for...in!). Special contexts that apply to both: list(), sorted(), tuple() +set(), any(), all(), sum(). + +Note: iter(d.keys()) could be written as iter(d) but since the +original d.iterkeys() was also redundant we don't fix this. And there +are (rare) contexts where it makes a difference (e.g. when passing it +as an argument to a function that introspects the argument). +""" + +# Local imports +from .. import pytree +from .. import patcomp +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot +from .. import fixer_util + + +iter_exempt = fixer_util.consuming_calls | set(["iter"]) + + +class FixDict(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< head=any+ + trailer< '.' 
method=('keys'|'items'|'values'| + 'iterkeys'|'iteritems'|'itervalues'| + 'viewkeys'|'viewitems'|'viewvalues') > + parens=trailer< '(' ')' > + tail=any* + > + """ + + def transform(self, node, results): + head = results["head"] + method = results["method"][0] # Extract node for method name + tail = results["tail"] + syms = self.syms + method_name = method.value + isiter = method_name.startswith("iter") + isview = method_name.startswith("view") + if isiter or isview: + method_name = method_name[4:] + assert method_name in ("keys", "items", "values"), repr(method) + head = [n.clone() for n in head] + tail = [n.clone() for n in tail] + special = not tail and self.in_special_context(node, isiter) + args = head + [pytree.Node(syms.trailer, + [Dot(), + Name(method_name, + prefix=method.prefix)]), + results["parens"].clone()] + new = pytree.Node(syms.power, args) + if not (special or isview): + new.prefix = "" + new = Call(Name("iter" if isiter else "list"), [new]) + if tail: + new = pytree.Node(syms.power, [new] + tail) + new.prefix = node.prefix + return new + + P1 = "power< func=NAME trailer< '(' node=any ')' > any* >" + p1 = patcomp.compile_pattern(P1) + + P2 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ + p2 = patcomp.compile_pattern(P2) + + def in_special_context(self, node, isiter): + if node.parent is None: + return False + results = {} + if (node.parent.parent is not None and + self.p1.match(node.parent.parent, results) and + results["node"] is node): + if isiter: + # iter(d.iterkeys()) -> iter(d.keys()), etc. + return results["func"].value in iter_exempt + else: + # list(d.keys()) -> list(d.keys()), etc. + return results["func"].value in fixer_util.consuming_calls + if not isiter: + return False + # for ... in d.iterkeys() -> for ... in d.keys(), etc. + return self.p2.match(node.parent, results) and results["node"] is node diff --git a/lib3/2to3/lib2to3/fixes/fix_except.py b/lib3/2to3/lib2to3/fixes/fix_except.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_except.py @@ -0,0 +1,93 @@ +"""Fixer for except statements with named exceptions. + +The following cases will be converted: + +- "except E, T:" where T is a name: + + except E as T: + +- "except E, T:" where T is not a name, tuple or list: + + except E as t: + T = t + + This is done because the target of an "except" clause must be a + name. + +- "except E, T:" where T is a tuple or list literal: + + except E as t: + T = t.args +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. 
import fixer_base +from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms + +def find_excepts(nodes): + for i, n in enumerate(nodes): + if n.type == syms.except_clause: + if n.children[0].value == 'except': + yield (n, nodes[i+2]) + +class FixExcept(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + try_stmt< 'try' ':' (simple_stmt | suite) + cleanup=(except_clause ':' (simple_stmt | suite))+ + tail=(['except' ':' (simple_stmt | suite)] + ['else' ':' (simple_stmt | suite)] + ['finally' ':' (simple_stmt | suite)]) > + """ + + def transform(self, node, results): + syms = self.syms + + tail = [n.clone() for n in results["tail"]] + + try_cleanup = [ch.clone() for ch in results["cleanup"]] + for except_clause, e_suite in find_excepts(try_cleanup): + if len(except_clause.children) == 4: + (E, comma, N) = except_clause.children[1:4] + comma.replace(Name("as", prefix=" ")) + + if N.type != token.NAME: + # Generate a new N for the except clause + new_N = Name(self.new_name(), prefix=" ") + target = N.clone() + target.prefix = "" + N.replace(new_N) + new_N = new_N.clone() + + # Insert "old_N = new_N" as the first statement in + # the except body. This loop skips leading whitespace + # and indents + #TODO(cwinter) suite-cleanup + suite_stmts = e_suite.children + for i, stmt in enumerate(suite_stmts): + if isinstance(stmt, pytree.Node): + break + + # The assignment is different if old_N is a tuple or list + # In that case, the assignment is old_N = new_N.args + if is_tuple(N) or is_list(N): + assign = Assign(target, Attr(new_N, Name('args'))) + else: + assign = Assign(target, new_N) + + #TODO(cwinter) stopgap until children becomes a smart list + for child in reversed(suite_stmts[:i]): + e_suite.insert_child(0, child) + e_suite.insert_child(i, assign) + elif N.prefix == "": + # No space after a comma is legal; no space after "as", + # not so much. + N.prefix = " " + + #TODO(cwinter) fix this when children becomes a smart list + children = [c.clone() for c in node.children[:3]] + try_cleanup + tail + return pytree.Node(node.type, children) diff --git a/lib3/2to3/lib2to3/fixes/fix_exec.py b/lib3/2to3/lib2to3/fixes/fix_exec.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_exec.py @@ -0,0 +1,40 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for exec. + +This converts usages of the exec statement into calls to a built-in +exec() function. + +exec code in ns1, ns2 -> exec(code, ns1, ns2) +""" + +# Local imports +from .. import pytree +from .. import fixer_base +from ..fixer_util import Comma, Name, Call + + +class FixExec(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + exec_stmt< 'exec' a=any 'in' b=any [',' c=any] > + | + exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any > + """ + + def transform(self, node, results): + assert results + syms = self.syms + a = results["a"] + b = results.get("b") + c = results.get("c") + args = [a.clone()] + args[0].prefix = "" + if b is not None: + args.extend([Comma(), b.clone()]) + if c is not None: + args.extend([Comma(), c.clone()]) + + return Call(Name("exec"), args, prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_execfile.py b/lib3/2to3/lib2to3/fixes/fix_execfile.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_execfile.py @@ -0,0 +1,52 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for execfile. 
+ +This converts usages of the execfile function into calls to the built-in +exec() function. +""" + +from .. import fixer_base +from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node, + ArgList, String, syms) + + +class FixExecfile(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > > + | + power< 'execfile' trailer< '(' filename=any ')' > > + """ + + def transform(self, node, results): + assert results + filename = results["filename"] + globals = results.get("globals") + locals = results.get("locals") + + # Copy over the prefix from the right parentheses end of the execfile + # call. + execfile_paren = node.children[-1].children[-1].clone() + # Construct open().read(). + open_args = ArgList([filename.clone()], rparen=execfile_paren) + open_call = Node(syms.power, [Name("open"), open_args]) + read = [Node(syms.trailer, [Dot(), Name('read')]), + Node(syms.trailer, [LParen(), RParen()])] + open_expr = [open_call] + read + # Wrap the open call in a compile call. This is so the filename will be + # preserved in the execed code. + filename_arg = filename.clone() + filename_arg.prefix = " " + exec_str = String("'exec'", " ") + compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str] + compile_call = Call(Name("compile"), compile_args, "") + # Finally, replace the execfile call with an exec call. + args = [compile_call] + if globals is not None: + args.extend([Comma(), globals.clone()]) + if locals is not None: + args.extend([Comma(), locals.clone()]) + return Call(Name("exec"), args, prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_exitfunc.py b/lib3/2to3/lib2to3/fixes/fix_exitfunc.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_exitfunc.py @@ -0,0 +1,72 @@ +""" +Convert use of sys.exitfunc to use the atexit module. +""" + +# Author: Benjamin Peterson + +from lib2to3 import pytree, fixer_base +from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms + + +class FixExitfunc(fixer_base.BaseFix): + keep_line_order = True + BM_compatible = True + + PATTERN = """ + ( + sys_import=import_name<'import' + ('sys' + | + dotted_as_names< (any ',')* 'sys' (',' any)* > + ) + > + | + expr_stmt< + power< 'sys' trailer< '.' 'exitfunc' > > + '=' func=any > + ) + """ + + def __init__(self, *args): + super(FixExitfunc, self).__init__(*args) + + def start_tree(self, tree, filename): + super(FixExitfunc, self).start_tree(tree, filename) + self.sys_import = None + + def transform(self, node, results): + # First, find a the sys import. We'll just hope it's global scope. + if "sys_import" in results: + if self.sys_import is None: + self.sys_import = results["sys_import"] + return + + func = results["func"].clone() + func.prefix = "" + register = pytree.Node(syms.power, + Attr(Name("atexit"), Name("register")) + ) + call = Call(register, [func], node.prefix) + node.replace(call) + + if self.sys_import is None: + # That's interesting. + self.warning(node, "Can't find sys import; Please add an atexit " + "import at the top of your file.") + return + + # Now add an atexit import after the sys import. 
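The exec and execfile fixers above both funnel the old statement or function forms into the built-in exec(). A hedged sketch of the combined effect, with the snippet and names invented for illustration:

    # Illustrative only: apply both fixers to a two-line Python 2 snippet.
    from lib2to3 import refactor

    rt = refactor.RefactoringTool(["lib2to3.fixes.fix_exec",
                                   "lib2to3.fixes.fix_execfile"])
    src = "exec code in ns1, ns2\nexecfile(fn)\n"
    print(str(rt.refactor_string(src, "<example>")))
    # exec code in ns1, ns2  ->  exec(code, ns1, ns2)
    # execfile(fn)           ->  exec(compile(open(fn).read(), fn, 'exec'))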
+ names = self.sys_import.children[1] + if names.type == syms.dotted_as_names: + names.append_child(Comma()) + names.append_child(Name("atexit", " ")) + else: + containing_stmt = self.sys_import.parent + position = containing_stmt.children.index(self.sys_import) + stmt_container = containing_stmt.parent + new_import = pytree.Node(syms.import_name, + [Name("import"), Name("atexit", " ")] + ) + new = pytree.Node(syms.simple_stmt, [new_import]) + containing_stmt.insert_child(position + 1, Newline()) + containing_stmt.insert_child(position + 2, new) diff --git a/lib3/2to3/lib2to3/fixes/fix_filter.py b/lib3/2to3/lib2to3/fixes/fix_filter.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_filter.py @@ -0,0 +1,76 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes filter(F, X) into list(filter(F, X)). + +We avoid the transformation if the filter() call is directly contained +in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or +for V in <>:. + +NOTE: This is still not correct if the original code was depending on +filter(F, X) to return a string if X is a string and a tuple if X is a +tuple. That would require type inference, which we don't do. Let +Python 2.6 figure it out. +""" + +# Local imports +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, ListComp, in_special_context + +class FixFilter(fixer_base.ConditionalFix): + BM_compatible = True + + PATTERN = """ + filter_lambda=power< + 'filter' + trailer< + '(' + arglist< + lambdef< 'lambda' + (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any + > + ',' + it=any + > + ')' + > + > + | + power< + 'filter' + trailer< '(' arglist< none='None' ',' seq=any > ')' > + > + | + power< + 'filter' + args=trailer< '(' [any] ')' > + > + """ + + skip_on = "future_builtins.filter" + + def transform(self, node, results): + if self.should_skip(node): + return + + if "filter_lambda" in results: + new = ListComp(results.get("fp").clone(), + results.get("fp").clone(), + results.get("it").clone(), + results.get("xp").clone()) + + elif "none" in results: + new = ListComp(Name("_f"), + Name("_f"), + results["seq"].clone(), + Name("_f")) + + else: + if in_special_context(node): + return None + new = node.clone() + new.prefix = "" + new = Call(Name("list"), [new]) + new.prefix = node.prefix + return new diff --git a/lib3/2to3/lib2to3/fixes/fix_funcattrs.py b/lib3/2to3/lib2to3/fixes/fix_funcattrs.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_funcattrs.py @@ -0,0 +1,21 @@ +"""Fix function attribute names (f.func_x -> f.__x__).""" +# Author: Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixFuncattrs(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' + | 'func_name' | 'func_defaults' | 'func_code' + | 'func_dict') > any* > + """ + + def transform(self, node, results): + attr = results["attr"][0] + attr.replace(Name(("__%s__" % attr.value[5:]), + prefix=attr.prefix)) diff --git a/lib3/2to3/lib2to3/fixes/fix_future.py b/lib3/2to3/lib2to3/fixes/fix_future.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_future.py @@ -0,0 +1,22 @@ +"""Remove __future__ imports + +from __future__ import foo is replaced with an empty line. +""" +# Author: Christian Heimes + +# Local imports +from .. 
import fixer_base +from ..fixer_util import BlankLine + +class FixFuture(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """import_from< 'from' module_name="__future__" 'import' any >""" + + # This should be run last -- some things check for the import + run_order = 10 + + def transform(self, node, results): + new = BlankLine() + new.prefix = node.prefix + return new diff --git a/lib3/2to3/lib2to3/fixes/fix_getcwdu.py b/lib3/2to3/lib2to3/fixes/fix_getcwdu.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_getcwdu.py @@ -0,0 +1,19 @@ +""" +Fixer that changes os.getcwdu() to os.getcwd(). +""" +# Author: Victor Stinner + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixGetcwdu(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'os' trailer< dot='.' name='getcwdu' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name("getcwd", prefix=name.prefix)) diff --git a/lib3/2to3/lib2to3/fixes/fix_has_key.py b/lib3/2to3/lib2to3/fixes/fix_has_key.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_has_key.py @@ -0,0 +1,110 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for has_key(). + +Calls to .has_key() methods are expressed in terms of the 'in' +operator: + + d.has_key(k) -> k in d + +CAVEATS: +1) While the primary target of this fixer is dict.has_key(), the + fixer will change any has_key() method call, regardless of its + class. + +2) Cases like this will not be converted: + + m = d.has_key + if m(k): + ... + + Only *calls* to has_key() are converted. While it is possible to + convert the above to something like + + m = d.__contains__ + if m(k): + ... + + this is currently not done. +""" + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, parenthesize + + +class FixHasKey(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + anchor=power< + before=any+ + trailer< '.' 'has_key' > + trailer< + '(' + ( not(arglist | argument) arg=any ','> + ) + ')' + > + after=any* + > + | + negation=not_test< + 'not' + anchor=power< + before=any+ + trailer< '.' 
'has_key' > + trailer< + '(' + ( not(arglist | argument) arg=any ','> + ) + ')' + > + > + > + """ + + def transform(self, node, results): + assert results + syms = self.syms + if (node.parent.type == syms.not_test and + self.pattern.match(node.parent)): + # Don't transform a node matching the first alternative of the + # pattern when its parent matches the second alternative + return None + negation = results.get("negation") + anchor = results["anchor"] + prefix = node.prefix + before = [n.clone() for n in results["before"]] + arg = results["arg"].clone() + after = results.get("after") + if after: + after = [n.clone() for n in after] + if arg.type in (syms.comparison, syms.not_test, syms.and_test, + syms.or_test, syms.test, syms.lambdef, syms.argument): + arg = parenthesize(arg) + if len(before) == 1: + before = before[0] + else: + before = pytree.Node(syms.power, before) + before.prefix = " " + n_op = Name("in", prefix=" ") + if negation: + n_not = Name("not", prefix=" ") + n_op = pytree.Node(syms.comp_op, (n_not, n_op)) + new = pytree.Node(syms.comparison, (arg, n_op, before)) + if after: + new = parenthesize(new) + new = pytree.Node(syms.power, (new,) + tuple(after)) + if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr, + syms.and_expr, syms.shift_expr, + syms.arith_expr, syms.term, + syms.factor, syms.power): + new = parenthesize(new) + new.prefix = prefix + return new diff --git a/lib3/2to3/lib2to3/fixes/fix_idioms.py b/lib3/2to3/lib2to3/fixes/fix_idioms.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_idioms.py @@ -0,0 +1,152 @@ +"""Adjust some old Python 2 idioms to their modern counterparts. + +* Change some type comparisons to isinstance() calls: + type(x) == T -> isinstance(x, T) + type(x) is T -> isinstance(x, T) + type(x) != T -> not isinstance(x, T) + type(x) is not T -> not isinstance(x, T) + +* Change "while 1:" into "while True:". + +* Change both + + v = list(EXPR) + v.sort() + foo(v) + +and the more general + + v = EXPR + v.sort() + foo(v) + +into + + v = sorted(EXPR) + foo(v) +""" +# Author: Jacques Frechet, Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms + +CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)" +TYPE = "power< 'type' trailer< '(' x=any ')' > >" + +class FixIdioms(fixer_base.BaseFix): + explicit = True # The user must ask for this fixer + + PATTERN = r""" + isinstance=comparison< %s %s T=any > + | + isinstance=comparison< T=any %s %s > + | + while_stmt< 'while' while='1' ':' any+ > + | + sorted=any< + any* + simple_stmt< + expr_stmt< id1=any '=' + power< list='list' trailer< '(' (not arglist) any ')' > > + > + '\n' + > + sort= + simple_stmt< + power< id2=any + trailer< '.' 'sort' > trailer< '(' ')' > + > + '\n' + > + next=any* + > + | + sorted=any< + any* + simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' > + sort= + simple_stmt< + power< id2=any + trailer< '.' 'sort' > trailer< '(' ')' > + > + '\n' + > + next=any* + > + """ % (TYPE, CMP, CMP, TYPE) + + def match(self, node): + r = super(FixIdioms, self).match(node) + # If we've matched one of the sort/sorted subpatterns above, we + # want to reject matches where the initial assignment and the + # subsequent .sort() call involve different identifiers. 
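The has_key fixer above can be exercised the same way; the snippet is invented for illustration:

    # Illustrative only: d.has_key(k) becomes the 'in' operator form.
    from lib2to3 import refactor

    rt = refactor.RefactoringTool(["lib2to3.fixes.fix_has_key"])
    print(str(rt.refactor_string("if d.has_key(k): pass\n", "<example>")))
    # if d.has_key(k): pass  ->  if k in d: pass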
+ if r and "sorted" in r: + if r["id1"] == r["id2"]: + return r + return None + return r + + def transform(self, node, results): + if "isinstance" in results: + return self.transform_isinstance(node, results) + elif "while" in results: + return self.transform_while(node, results) + elif "sorted" in results: + return self.transform_sort(node, results) + else: + raise RuntimeError("Invalid match") + + def transform_isinstance(self, node, results): + x = results["x"].clone() # The thing inside of type() + T = results["T"].clone() # The type being compared against + x.prefix = "" + T.prefix = " " + test = Call(Name("isinstance"), [x, Comma(), T]) + if "n" in results: + test.prefix = " " + test = Node(syms.not_test, [Name("not"), test]) + test.prefix = node.prefix + return test + + def transform_while(self, node, results): + one = results["while"] + one.replace(Name("True", prefix=one.prefix)) + + def transform_sort(self, node, results): + sort_stmt = results["sort"] + next_stmt = results["next"] + list_call = results.get("list") + simple_expr = results.get("expr") + + if list_call: + list_call.replace(Name("sorted", prefix=list_call.prefix)) + elif simple_expr: + new = simple_expr.clone() + new.prefix = "" + simple_expr.replace(Call(Name("sorted"), [new], + prefix=simple_expr.prefix)) + else: + raise RuntimeError("should not have reached here") + sort_stmt.remove() + + btwn = sort_stmt.prefix + # Keep any prefix lines between the sort_stmt and the list_call and + # shove them right after the sorted() call. + if "\n" in btwn: + if next_stmt: + # The new prefix should be everything from the sort_stmt's + # prefix up to the last newline, then the old prefix after a new + # line. + prefix_lines = (btwn.rpartition("\n")[0], next_stmt[0].prefix) + next_stmt[0].prefix = "\n".join(prefix_lines) + else: + assert list_call.parent + assert list_call.next_sibling is None + # Put a blank line after list_call and set its prefix. + end_line = BlankLine() + list_call.parent.append_child(end_line) + assert list_call.next_sibling is end_line + # The new prefix should be everything up to the first new line + # of sort_stmt's prefix. + end_line.prefix = btwn.rpartition("\n")[0] diff --git a/lib3/2to3/lib2to3/fixes/fix_import.py b/lib3/2to3/lib2to3/fixes/fix_import.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_import.py @@ -0,0 +1,99 @@ +"""Fixer for import statements. +If spam is being imported from the local directory, this import: + from spam import eggs +Becomes: + from .spam import eggs + +And this import: + import spam +Becomes: + from . import spam +""" + +# Local imports +from .. import fixer_base +from os.path import dirname, join, exists, sep +from ..fixer_util import FromImport, syms, token + + +def traverse_imports(names): + """ + Walks over all the names imported in a dotted_as_names node. 
+ """ + pending = [names] + while pending: + node = pending.pop() + if node.type == token.NAME: + yield node.value + elif node.type == syms.dotted_name: + yield "".join([ch.value for ch in node.children]) + elif node.type == syms.dotted_as_name: + pending.append(node.children[0]) + elif node.type == syms.dotted_as_names: + pending.extend(node.children[::-2]) + else: + raise AssertionError("unkown node type") + + +class FixImport(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + import_from< 'from' imp=any 'import' ['('] any [')'] > + | + import_name< 'import' imp=any > + """ + + def start_tree(self, tree, name): + super(FixImport, self).start_tree(tree, name) + self.skip = "absolute_import" in tree.future_features + + def transform(self, node, results): + if self.skip: + return + imp = results['imp'] + + if node.type == syms.import_from: + # Some imps are top-level (eg: 'import ham') + # some are first level (eg: 'import ham.eggs') + # some are third level (eg: 'import ham.eggs as spam') + # Hence, the loop + while not hasattr(imp, 'value'): + imp = imp.children[0] + if self.probably_a_local_import(imp.value): + imp.value = "." + imp.value + imp.changed() + else: + have_local = False + have_absolute = False + for mod_name in traverse_imports(imp): + if self.probably_a_local_import(mod_name): + have_local = True + else: + have_absolute = True + if have_absolute: + if have_local: + # We won't handle both sibling and absolute imports in the + # same statement at the moment. + self.warning(node, "absolute and local imports together") + return + + new = FromImport(".", [imp]) + new.prefix = node.prefix + return new + + def probably_a_local_import(self, imp_name): + if imp_name.startswith("."): + # Relative imports are certainly not local imports. + return False + imp_name = imp_name.split(".", 1)[0] + base_path = dirname(self.filename) + base_path = join(base_path, imp_name) + # If there is no __init__.py next to the file its not in a package + # so can't be a relative import. + if not exists(join(dirname(base_path), "__init__.py")): + return False + for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]: + if exists(base_path + ext): + return True + return False diff --git a/lib3/2to3/lib2to3/fixes/fix_imports.py b/lib3/2to3/lib2to3/fixes/fix_imports.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_imports.py @@ -0,0 +1,145 @@ +"""Fix incompatible imports and module references.""" +# Authors: Collin Winter, Nick Edds + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Name, attr_chain + +MAPPING = {'StringIO': 'io', + 'cStringIO': 'io', + 'cPickle': 'pickle', + '__builtin__' : 'builtins', + 'copy_reg': 'copyreg', + 'Queue': 'queue', + 'SocketServer': 'socketserver', + 'ConfigParser': 'configparser', + 'repr': 'reprlib', + 'FileDialog': 'tkinter.filedialog', + 'tkFileDialog': 'tkinter.filedialog', + 'SimpleDialog': 'tkinter.simpledialog', + 'tkSimpleDialog': 'tkinter.simpledialog', + 'tkColorChooser': 'tkinter.colorchooser', + 'tkCommonDialog': 'tkinter.commondialog', + 'Dialog': 'tkinter.dialog', + 'Tkdnd': 'tkinter.dnd', + 'tkFont': 'tkinter.font', + 'tkMessageBox': 'tkinter.messagebox', + 'ScrolledText': 'tkinter.scrolledtext', + 'Tkconstants': 'tkinter.constants', + 'Tix': 'tkinter.tix', + 'ttk': 'tkinter.ttk', + 'Tkinter': 'tkinter', + 'markupbase': '_markupbase', + '_winreg': 'winreg', + 'thread': '_thread', + 'dummy_thread': '_dummy_thread', + # anydbm and whichdb are handled by fix_imports2 + 'dbhash': 'dbm.bsd', + 'dumbdbm': 'dbm.dumb', + 'dbm': 'dbm.ndbm', + 'gdbm': 'dbm.gnu', + 'xmlrpclib': 'xmlrpc.client', + 'DocXMLRPCServer': 'xmlrpc.server', + 'SimpleXMLRPCServer': 'xmlrpc.server', + 'httplib': 'http.client', + 'htmlentitydefs' : 'html.entities', + 'HTMLParser' : 'html.parser', + 'Cookie': 'http.cookies', + 'cookielib': 'http.cookiejar', + 'BaseHTTPServer': 'http.server', + 'SimpleHTTPServer': 'http.server', + 'CGIHTTPServer': 'http.server', + #'test.test_support': 'test.support', + 'commands': 'subprocess', + 'UserString' : 'collections', + 'UserList' : 'collections', + 'urlparse' : 'urllib.parse', + 'robotparser' : 'urllib.robotparser', +} + + +def alternates(members): + return "(" + "|".join(map(repr, members)) + ")" + + +def build_pattern(mapping=MAPPING): + mod_list = ' | '.join(["module_name='%s'" % key for key in mapping]) + bare_names = alternates(list(mapping.keys())) + + yield """name_import=import_name< 'import' ((%s) | + multiple_imports=dotted_as_names< any* (%s) any* >) > + """ % (mod_list, mod_list) + yield """import_from< 'from' (%s) 'import' ['('] + ( any | import_as_name< any 'as' any > | + import_as_names< any* >) [')'] > + """ % mod_list + yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > | + multiple_imports=dotted_as_names< + any* dotted_as_name< (%s) 'as' any > any* >) > + """ % (mod_list, mod_list) + + # Find usages of module members in code e.g. thread.foo(bar) + yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names + + +class FixImports(fixer_base.BaseFix): + + BM_compatible = True + keep_line_order = True + # This is overridden in fix_imports2. + mapping = MAPPING + + # We want to run this fixer late, so fix_import doesn't try to make stdlib + # renames into relative imports. + run_order = 6 + + def build_pattern(self): + return "|".join(build_pattern(self.mapping)) + + def compile_pattern(self): + # We override this, so MAPPING can be pragmatically altered and the + # changes will be reflected in PATTERN. + self.PATTERN = self.build_pattern() + super(FixImports, self).compile_pattern() + + # Don't match the node if it's within another match. + def match(self, node): + match = super(FixImports, self).match + results = match(node) + if results: + # Module usage could be in the trailer of an attribute lookup, so we + # might have nested matches when "bare_with_attr" is present. 
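The stdlib rename table above drives two kinds of rewriting: the import statement itself and later attribute uses of the renamed module. A hedged sketch, with the snippet invented for illustration:

    # Illustrative only: both the import and the qualified use are renamed.
    from lib2to3 import refactor

    rt = refactor.RefactoringTool(["lib2to3.fixes.fix_imports"])
    src = "import StringIO\ns = StringIO.StringIO()\n"
    print(str(rt.refactor_string(src, "<example>")))
    # import StringIO          ->  import io
    # s = StringIO.StringIO()  ->  s = io.StringIO()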
+ if "bare_with_attr" not in results and \ + any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + def start_tree(self, tree, filename): + super(FixImports, self).start_tree(tree, filename) + self.replace = {} + + def transform(self, node, results): + import_mod = results.get("module_name") + if import_mod: + mod_name = import_mod.value + new_name = str(self.mapping[mod_name]) + import_mod.replace(Name(new_name, prefix=import_mod.prefix)) + if "name_import" in results: + # If it's not a "from x import x, y" or "import x as y" import, + # marked its usage to be replaced. + self.replace[mod_name] = new_name + if "multiple_imports" in results: + # This is a nasty hack to fix multiple imports on a line (e.g., + # "import StringIO, urlparse"). The problem is that I can't + # figure out an easy way to make a pattern recognize the keys of + # MAPPING randomly sprinkled in an import statement. + results = self.match(node) + if results: + self.transform(node, results) + else: + # Replace usage of the module. + bare_name = results["bare_with_attr"][0] + new_name = self.replace.get(bare_name.value) + if new_name: + bare_name.replace(Name(new_name, prefix=bare_name.prefix)) diff --git a/lib3/2to3/lib2to3/fixes/fix_imports2.py b/lib3/2to3/lib2to3/fixes/fix_imports2.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_imports2.py @@ -0,0 +1,16 @@ +"""Fix incompatible imports and module references that must be fixed after +fix_imports.""" +from . import fix_imports + + +MAPPING = { + 'whichdb': 'dbm', + 'anydbm': 'dbm', + } + + +class FixImports2(fix_imports.FixImports): + + run_order = 7 + + mapping = MAPPING diff --git a/lib3/2to3/lib2to3/fixes/fix_input.py b/lib3/2to3/lib2to3/fixes/fix_input.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_input.py @@ -0,0 +1,26 @@ +"""Fixer that changes input(...) into eval(input(...)).""" +# Author: Andre Roberge + +# Local imports +from .. import fixer_base +from ..fixer_util import Call, Name +from .. import patcomp + + +context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >") + + +class FixInput(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< 'input' args=trailer< '(' [any] ')' > > + """ + + def transform(self, node, results): + # If we're already wrapped in a eval() call, we're done. + if context.match(node.parent.parent): + return + + new = node.clone() + new.prefix = "" + return Call(Name("eval"), [new], prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_intern.py b/lib3/2to3/lib2to3/fixes/fix_intern.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_intern.py @@ -0,0 +1,46 @@ +# Copyright 2006 Georg Brandl. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for intern(). + +intern(s) -> sys.intern(s)""" + +# Local imports +from .. import pytree +from .. 
import fixer_base +from ..fixer_util import Name, Attr, touch_import + + +class FixIntern(fixer_base.BaseFix): + BM_compatible = True + order = "pre" + + PATTERN = """ + power< 'intern' + trailer< lpar='(' + ( not(arglist | argument) any ','> ) + rpar=')' > + after=any* + > + """ + + def transform(self, node, results): + syms = self.syms + obj = results["obj"].clone() + if obj.type == syms.arglist: + newarglist = obj.clone() + else: + newarglist = pytree.Node(syms.arglist, [obj.clone()]) + after = results["after"] + if after: + after = [n.clone() for n in after] + new = pytree.Node(syms.power, + Attr(Name("sys"), Name("intern")) + + [pytree.Node(syms.trailer, + [results["lpar"].clone(), + newarglist, + results["rpar"].clone()])] + after) + new.prefix = node.prefix + touch_import(None, 'sys', node) + return new diff --git a/lib3/2to3/lib2to3/fixes/fix_isinstance.py b/lib3/2to3/lib2to3/fixes/fix_isinstance.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_isinstance.py @@ -0,0 +1,52 @@ +# Copyright 2008 Armin Ronacher. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that cleans up a tuple argument to isinstance after the tokens +in it were fixed. This is mainly used to remove double occurrences of +tokens as a leftover of the long -> int / unicode -> str conversion. + +eg. isinstance(x, (int, long)) -> isinstance(x, (int, int)) + -> isinstance(x, int) +""" + +from .. import fixer_base +from ..fixer_util import token + + +class FixIsinstance(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< + 'isinstance' + trailer< '(' arglist< any ',' atom< '(' + args=testlist_gexp< any+ > + ')' > > ')' > + > + """ + + run_order = 6 + + def transform(self, node, results): + names_inserted = set() + testlist = results["args"] + args = testlist.children + new_args = [] + iterator = enumerate(args) + for idx, arg in iterator: + if arg.type == token.NAME and arg.value in names_inserted: + if idx < len(args) - 1 and args[idx + 1].type == token.COMMA: + next(iterator) + continue + else: + new_args.append(arg) + if arg.type == token.NAME: + names_inserted.add(arg.value) + if new_args and new_args[-1].type == token.COMMA: + del new_args[-1] + if len(new_args) == 1: + atom = testlist.parent + new_args[0].prefix = atom.prefix + atom.replace(new_args[0]) + else: + args[:] = new_args + node.changed() diff --git a/lib3/2to3/lib2to3/fixes/fix_itertools.py b/lib3/2to3/lib2to3/fixes/fix_itertools.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_itertools.py @@ -0,0 +1,42 @@ +""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and + itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) + + imports from itertools are fixed in fix_itertools_import.py + + If itertools is imported as something else (ie: import itertools as it; + it.izip(spam, eggs)) method calls will not get fixed. + """ + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixItertools(fixer_base.BaseFix): + BM_compatible = True + it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')" + PATTERN = """ + power< it='itertools' + trailer< + dot='.' 
func=%(it_funcs)s > trailer< '(' [any] ')' > > + | + power< func=%(it_funcs)s trailer< '(' [any] ')' > > + """ %(locals()) + + # Needs to be run after fix_(map|zip|filter) + run_order = 6 + + def transform(self, node, results): + prefix = None + func = results['func'][0] + if 'it' in results and func.value != 'ifilterfalse': + dot, it = (results['dot'], results['it']) + # Remove the 'itertools' + prefix = it.prefix + it.remove() + # Replace the node wich contains ('.', 'function') with the + # function (to be consistant with the second part of the pattern) + dot.remove() + func.parent.replace(func) + + prefix = prefix or func.prefix + func.replace(Name(func.value[1:], prefix=prefix)) diff --git a/lib3/2to3/lib2to3/fixes/fix_itertools_imports.py b/lib3/2to3/lib2to3/fixes/fix_itertools_imports.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_itertools_imports.py @@ -0,0 +1,56 @@ +""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """ + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import BlankLine, syms, token + + +class FixItertoolsImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + import_from< 'from' 'itertools' 'import' imports=any > + """ %(locals()) + + def transform(self, node, results): + imports = results['imports'] + if imports.type == syms.import_as_name or not imports.children: + children = [imports] + else: + children = imports.children + for child in children[::2]: + if child.type == token.NAME: + member = child.value + name_node = child + elif child.type == token.STAR: + # Just leave the import as is. + return + else: + assert child.type == syms.import_as_name + name_node = child.children[0] + member_name = name_node.value + if member_name in ('imap', 'izip', 'ifilter'): + child.value = None + child.remove() + elif member_name == 'ifilterfalse': + node.changed() + name_node.value = 'filterfalse' + + # Make sure the import statement is still sane + children = imports.children[:] or [imports] + remove_comma = True + for child in children: + if remove_comma and child.type == token.COMMA: + child.remove() + else: + remove_comma ^= True + + while children and children[-1].type == token.COMMA: + children.pop().remove() + + # If there are no imports left, just get rid of the entire statement + if (not (imports.children or getattr(imports, 'value', None)) or + imports.parent is None): + p = node.prefix + node = BlankLine() + node.prefix = p + return node diff --git a/lib3/2to3/lib2to3/fixes/fix_long.py b/lib3/2to3/lib2to3/fixes/fix_long.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_long.py @@ -0,0 +1,19 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that turns 'long' into 'int' everywhere. +""" + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import is_probably_builtin + + +class FixLong(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "'long'" + + def transform(self, node, results): + if is_probably_builtin(node): + node.value = "int" + node.changed() diff --git a/lib3/2to3/lib2to3/fixes/fix_map.py b/lib3/2to3/lib2to3/fixes/fix_map.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_map.py @@ -0,0 +1,91 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes map(F, ...) 
into list(map(F, ...)) unless there +exists a 'from future_builtins import map' statement in the top-level +namespace. + +As a special case, map(None, X) is changed into list(X). (This is +necessary because the semantics are changed in this case -- the new +map(None, X) is equivalent to [(x,) for x in X].) + +We avoid the transformation (except for the special case mentioned +above) if the map() call is directly contained in iter(<>), list(<>), +tuple(<>), sorted(<>), ...join(<>), or for V in <>:. + +NOTE: This is still not correct if the original code was depending on +map(F, X, Y, ...) to go on until the longest argument is exhausted, +substituting None for missing values -- like zip(), it now stops as +soon as the shortest argument is exhausted. +""" + +# Local imports +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, ListComp, in_special_context +from ..pygram import python_symbols as syms + +class FixMap(fixer_base.ConditionalFix): + BM_compatible = True + + PATTERN = """ + map_none=power< + 'map' + trailer< '(' arglist< 'None' ',' arg=any [','] > ')' > + > + | + map_lambda=power< + 'map' + trailer< + '(' + arglist< + lambdef< 'lambda' + (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any + > + ',' + it=any + > + ')' + > + > + | + power< + 'map' trailer< '(' [arglist=any] ')' > + > + """ + + skip_on = 'future_builtins.map' + + def transform(self, node, results): + if self.should_skip(node): + return + + if node.parent.type == syms.simple_stmt: + self.warning(node, "You should use a for loop here") + new = node.clone() + new.prefix = "" + new = Call(Name("list"), [new]) + elif "map_lambda" in results: + new = ListComp(results["xp"].clone(), + results["fp"].clone(), + results["it"].clone()) + else: + if "map_none" in results: + new = results["arg"].clone() + else: + if "arglist" in results: + args = results["arglist"] + if args.type == syms.arglist and \ + args.children[0].type == token.NAME and \ + args.children[0].value == "None": + self.warning(node, "cannot convert map(None, ...) " + "with multiple arguments because map() " + "now truncates to the shortest sequence") + return + if in_special_context(node): + return None + new = node.clone() + new.prefix = "" + new = Call(Name("list"), [new]) + new.prefix = node.prefix + return new diff --git a/lib3/2to3/lib2to3/fixes/fix_metaclass.py b/lib3/2to3/lib2to3/fixes/fix_metaclass.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_metaclass.py @@ -0,0 +1,228 @@ +"""Fixer for __metaclass__ = X -> (metaclass=X) methods. + + The various forms of classef (inherits nothing, inherits once, inherints + many) don't parse the same in the CST so we look at ALL classes for + a __metaclass__ and if we find one normalize the inherits to all be + an arglist. + + For one-liner classes ('class X: pass') there is no indent/dedent so + we normalize those into having a suite. + + Moving the __metaclass__ into the classdef can also cause the class + body to be empty so there is some special casing for that as well. + + This fixer also tries very hard to keep original indenting and spacing + in all those corner cases. + +""" +# Author: Jack Diederich + +# Local imports +from .. import fixer_base +from ..pygram import token +from ..fixer_util import Name, syms, Node, Leaf + + +def has_metaclass(parent): + """ we have to check the cls_node without changing it. 
+ There are two possiblities: + 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') + 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') + """ + for node in parent.children: + if node.type == syms.suite: + return has_metaclass(node) + elif node.type == syms.simple_stmt and node.children: + expr_node = node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + left_side = expr_node.children[0] + if isinstance(left_side, Leaf) and \ + left_side.value == '__metaclass__': + return True + return False + + +def fixup_parse_tree(cls_node): + """ one-line classes don't get a suite in the parse tree so we add + one to normalize the tree + """ + for node in cls_node.children: + if node.type == syms.suite: + # already in the prefered format, do nothing + return + + # !%@#! oneliners have no suite node, we have to fake one up + for i, node in enumerate(cls_node.children): + if node.type == token.COLON: + break + else: + raise ValueError("No class suite and no ':'!") + + # move everything into a suite node + suite = Node(syms.suite, []) + while cls_node.children[i+1:]: + move_node = cls_node.children[i+1] + suite.append_child(move_node.clone()) + move_node.remove() + cls_node.append_child(suite) + node = suite + + +def fixup_simple_stmt(parent, i, stmt_node): + """ if there is a semi-colon all the parts count as part of the same + simple_stmt. We just want the __metaclass__ part so we move + everything efter the semi-colon into its own simple_stmt node + """ + for semi_ind, node in enumerate(stmt_node.children): + if node.type == token.SEMI: # *sigh* + break + else: + return + + node.remove() # kill the semicolon + new_expr = Node(syms.expr_stmt, []) + new_stmt = Node(syms.simple_stmt, [new_expr]) + while stmt_node.children[semi_ind:]: + move_node = stmt_node.children[semi_ind] + new_expr.append_child(move_node.clone()) + move_node.remove() + parent.insert_child(i, new_stmt) + new_leaf1 = new_stmt.children[0].children[0] + old_leaf1 = stmt_node.children[0].children[0] + new_leaf1.prefix = old_leaf1.prefix + + +def remove_trailing_newline(node): + if node.children and node.children[-1].type == token.NEWLINE: + node.children[-1].remove() + + +def find_metas(cls_node): + # find the suite node (Mmm, sweet nodes) + for node in cls_node.children: + if node.type == syms.suite: + break + else: + raise ValueError("No class suite!") + + # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ] + for i, simple_node in list(enumerate(node.children)): + if simple_node.type == syms.simple_stmt and simple_node.children: + expr_node = simple_node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + # Check if the expr_node is a simple assignment. + left_node = expr_node.children[0] + if isinstance(left_node, Leaf) and \ + left_node.value == '__metaclass__': + # We found a assignment to __metaclass__. 
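# A hedged before/after sketch of the rewrite these helpers support, assuming
# lib2to3 is still importable; the class, base and metaclass names are
# invented for the example.
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_metaclass"])
src = "class C(Base):\n    __metaclass__ = Meta\n    x = 1\n"
print(tool.refactor_string(src, "<example>"))
# Expected result, roughly:
#   class C(Base, metaclass=Meta):
#       x = 1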
+ fixup_simple_stmt(node, i, simple_node) + remove_trailing_newline(simple_node) + yield (node, i, simple_node) + + +def fixup_indent(suite): + """ If an INDENT is followed by a thing with a prefix then nuke the prefix + Otherwise we get in trouble when removing __metaclass__ at suite start + """ + kids = suite.children[::-1] + # find the first indent + while kids: + node = kids.pop() + if node.type == token.INDENT: + break + + # find the first Leaf + while kids: + node = kids.pop() + if isinstance(node, Leaf) and node.type != token.DEDENT: + if node.prefix: + node.prefix = '' + return + else: + kids.extend(node.children[::-1]) + + +class FixMetaclass(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + classdef + """ + + def transform(self, node, results): + if not has_metaclass(node): + return + + fixup_parse_tree(node) + + # find metaclasses, keep the last one + last_metaclass = None + for suite, i, stmt in find_metas(node): + last_metaclass = stmt + stmt.remove() + + text_type = node.children[0].type # always Leaf(nnn, 'class') + + # figure out what kind of classdef we have + if len(node.children) == 7: + # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite]) + # 0 1 2 3 4 5 6 + if node.children[3].type == syms.arglist: + arglist = node.children[3] + # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite]) + else: + parent = node.children[3].clone() + arglist = Node(syms.arglist, [parent]) + node.set_child(3, arglist) + elif len(node.children) == 6: + # Node(classdef, ['class', 'name', '(', ')', ':', suite]) + # 0 1 2 3 4 5 + arglist = Node(syms.arglist, []) + node.insert_child(3, arglist) + elif len(node.children) == 4: + # Node(classdef, ['class', 'name', ':', suite]) + # 0 1 2 3 + arglist = Node(syms.arglist, []) + node.insert_child(2, Leaf(token.RPAR, ')')) + node.insert_child(2, arglist) + node.insert_child(2, Leaf(token.LPAR, '(')) + else: + raise ValueError("Unexpected class definition") + + # now stick the metaclass in the arglist + meta_txt = last_metaclass.children[0].children[0] + meta_txt.value = 'metaclass' + orig_meta_prefix = meta_txt.prefix + + if arglist.children: + arglist.append_child(Leaf(token.COMMA, ',')) + meta_txt.prefix = ' ' + else: + meta_txt.prefix = '' + + # compact the expression "metaclass = Meta" -> "metaclass=Meta" + expr_stmt = last_metaclass.children[0] + assert expr_stmt.type == syms.expr_stmt + expr_stmt.children[1].prefix = '' + expr_stmt.children[2].prefix = '' + + arglist.append_child(last_metaclass) + + fixup_indent(suite) + + # check for empty suite + if not suite.children: + # one-liner that was just __metaclass_ + suite.remove() + pass_leaf = Leaf(text_type, 'pass') + pass_leaf.prefix = orig_meta_prefix + node.append_child(pass_leaf) + node.append_child(Leaf(token.NEWLINE, '\n')) + + elif len(suite.children) > 1 and \ + (suite.children[-2].type == token.INDENT and + suite.children[-1].type == token.DEDENT): + # there was only one line in the class body and it was __metaclass__ + pass_leaf = Leaf(text_type, 'pass') + suite.insert_child(-1, pass_leaf) + suite.insert_child(-1, Leaf(token.NEWLINE, '\n')) diff --git a/lib3/2to3/lib2to3/fixes/fix_methodattrs.py b/lib3/2to3/lib2to3/fixes/fix_methodattrs.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_methodattrs.py @@ -0,0 +1,24 @@ +"""Fix bound method attributes (method.im_? -> method.__?__). +""" +# Author: Christian Heimes + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Name + +MAP = { + "im_func" : "__func__", + "im_self" : "__self__", + "im_class" : "__self__.__class__" + } + +class FixMethodattrs(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* > + """ + + def transform(self, node, results): + attr = results["attr"][0] + new = str(MAP[attr.value]) + attr.replace(Name(new, prefix=attr.prefix)) diff --git a/lib3/2to3/lib2to3/fixes/fix_ne.py b/lib3/2to3/lib2to3/fixes/fix_ne.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_ne.py @@ -0,0 +1,23 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that turns <> into !=.""" + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base + + +class FixNe(fixer_base.BaseFix): + # This is so simple that we don't need the pattern compiler. + + _accept_type = token.NOTEQUAL + + def match(self, node): + # Override + return node.value == "<>" + + def transform(self, node, results): + new = pytree.Leaf(token.NOTEQUAL, "!=", prefix=node.prefix) + return new diff --git a/lib3/2to3/lib2to3/fixes/fix_next.py b/lib3/2to3/lib2to3/fixes/fix_next.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_next.py @@ -0,0 +1,103 @@ +"""Fixer for it.next() -> next(it), per PEP 3114.""" +# Author: Collin Winter + +# Things that currently aren't covered: +# - listcomp "next" names aren't warned +# - "with" statement targets aren't checked + +# Local imports +from ..pgen2 import token +from ..pygram import python_symbols as syms +from .. import fixer_base +from ..fixer_util import Name, Call, find_binding + +bind_warning = "Calls to builtin next() possibly shadowed by global binding" + + +class FixNext(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > + | + power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > + | + classdef< 'class' any+ ':' + suite< any* + funcdef< 'def' + name='next' + parameters< '(' NAME ')' > any+ > + any* > > + | + global=global_stmt< 'global' any* 'next' any* > + """ + + order = "pre" # Pre-order tree traversal + + def start_tree(self, tree, filename): + super(FixNext, self).start_tree(tree, filename) + + n = find_binding('next', tree) + if n: + self.warning(n, bind_warning) + self.shadowed_next = True + else: + self.shadowed_next = False + + def transform(self, node, results): + assert results + + base = results.get("base") + attr = results.get("attr") + name = results.get("name") + + if base: + if self.shadowed_next: + attr.replace(Name("__next__", prefix=attr.prefix)) + else: + base = [n.clone() for n in base] + base[0].prefix = "" + node.replace(Call(Name("next", prefix=node.prefix), base)) + elif name: + n = Name("__next__", prefix=name.prefix) + name.replace(n) + elif attr: + # We don't do this transformation if we're assigning to "x.next". + # Unfortunately, it doesn't seem possible to do this in PATTERN, + # so it's being done here. + if is_assign_target(node): + head = results["head"] + if "".join([str(n) for n in head]).strip() == '__builtin__': + self.warning(node, bind_warning) + return + attr.replace(Name("__next__")) + elif "global" in results: + self.warning(node, bind_warning) + self.shadowed_next = True + + +### The following functions help test if node is part of an assignment +### target. 
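# A small illustration, not from the patch, of the it.next() -> next(it)
# rewrite this fixer performs, assuming the deprecated lib2to3 package is
# available; the variable names are placeholders.
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_next"])
print(tool.refactor_string("result = it.next()\n", "<example>"))
# Expected result, roughly: result = next(it)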
+ +def is_assign_target(node): + assign = find_assign(node) + if assign is None: + return False + + for child in assign.children: + if child.type == token.EQUAL: + return False + elif is_subtree(child, node): + return True + return False + +def find_assign(node): + if node.type == syms.expr_stmt: + return node + if node.type == syms.simple_stmt or node.parent is None: + return None + return find_assign(node.parent) + +def is_subtree(root, node): + if root == node: + return True + return any(is_subtree(c, node) for c in root.children) diff --git a/lib3/2to3/lib2to3/fixes/fix_nonzero.py b/lib3/2to3/lib2to3/fixes/fix_nonzero.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_nonzero.py @@ -0,0 +1,21 @@ +"""Fixer for __nonzero__ -> __bool__ methods.""" +# Author: Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, syms + +class FixNonzero(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + classdef< 'class' any+ ':' + suite< any* + funcdef< 'def' name='__nonzero__' + parameters< '(' NAME ')' > any+ > + any* > > + """ + + def transform(self, node, results): + name = results["name"] + new = Name("__bool__", prefix=name.prefix) + name.replace(new) diff --git a/lib3/2to3/lib2to3/fixes/fix_numliterals.py b/lib3/2to3/lib2to3/fixes/fix_numliterals.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_numliterals.py @@ -0,0 +1,28 @@ +"""Fixer that turns 1L into 1, 0755 into 0o755. +""" +# Copyright 2007 Georg Brandl. +# Licensed to PSF under a Contributor Agreement. + +# Local imports +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Number + + +class FixNumliterals(fixer_base.BaseFix): + # This is so simple that we don't need the pattern compiler. + + _accept_type = token.NUMBER + + def match(self, node): + # Override + return (node.value.startswith("0") or node.value[-1] in "Ll") + + def transform(self, node, results): + val = node.value + if val[-1] in 'Ll': + val = val[:-1] + elif val.startswith('0') and val.isdigit() and len(set(val)) > 1: + val = "0o" + val[1:] + + return Number(val, prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_operator.py b/lib3/2to3/lib2to3/fixes/fix_operator.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_operator.py @@ -0,0 +1,99 @@ +"""Fixer for operator functions. + +operator.isCallable(obj) -> hasattr(obj, '__call__') +operator.sequenceIncludes(obj) -> operator.contains(obj) +operator.isSequenceType(obj) -> isinstance(obj, collections.Sequence) +operator.isMappingType(obj) -> isinstance(obj, collections.Mapping) +operator.isNumberType(obj) -> isinstance(obj, numbers.Number) +operator.repeat(obj, n) -> operator.mul(obj, n) +operator.irepeat(obj, n) -> operator.imul(obj, n) +""" + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import Call, Name, String, touch_import +import collections + + +def invocation(s): + def dec(f): + f.invocation = s + return f + return dec + + +class FixOperator(fixer_base.BaseFix): + BM_compatible = True + order = "pre" + + methods = """ + method=('isCallable'|'sequenceIncludes' + |'isSequenceType'|'isMappingType'|'isNumberType' + |'repeat'|'irepeat') + """ + obj = "'(' obj=any ')'" + PATTERN = """ + power< module='operator' + trailer< '.' 
%(methods)s > trailer< %(obj)s > > + | + power< %(methods)s trailer< %(obj)s > > + """ % dict(methods=methods, obj=obj) + + def transform(self, node, results): + method = self._check_method(node, results) + if method is not None: + return method(node, results) + + @invocation("operator.contains(%s)") + def _sequenceIncludes(self, node, results): + return self._handle_rename(node, results, "contains") + + @invocation("hasattr(%s, '__call__')") + def _isCallable(self, node, results): + obj = results["obj"] + args = [obj.clone(), String(", "), String("'__call__'")] + return Call(Name("hasattr"), args, prefix=node.prefix) + + @invocation("operator.mul(%s)") + def _repeat(self, node, results): + return self._handle_rename(node, results, "mul") + + @invocation("operator.imul(%s)") + def _irepeat(self, node, results): + return self._handle_rename(node, results, "imul") + + @invocation("isinstance(%s, collections.Sequence)") + def _isSequenceType(self, node, results): + return self._handle_type2abc(node, results, "collections", "Sequence") + + @invocation("isinstance(%s, collections.Mapping)") + def _isMappingType(self, node, results): + return self._handle_type2abc(node, results, "collections", "Mapping") + + @invocation("isinstance(%s, numbers.Number)") + def _isNumberType(self, node, results): + return self._handle_type2abc(node, results, "numbers", "Number") + + def _handle_rename(self, node, results, name): + method = results["method"][0] + method.value = name + method.changed() + + def _handle_type2abc(self, node, results, module, abc): + touch_import(None, module, node) + obj = results["obj"] + args = [obj.clone(), String(", " + ".".join([module, abc]))] + return Call(Name("isinstance"), args, prefix=node.prefix) + + def _check_method(self, node, results): + # Issue #15834: don't encode to ASCII as that breaks in translation to + # Python 3. + method = getattr(self, "_" + str(results["method"][0].value)) + if isinstance(method, collections.Callable): + if "module" in results: + return method + else: + sub = (str(results["obj"]),) + invocation_str = str(method.invocation) % sub + self.warning(node, "You should use '%s' here." % invocation_str) + return None diff --git a/lib3/2to3/lib2to3/fixes/fix_paren.py b/lib3/2to3/lib2to3/fixes/fix_paren.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_paren.py @@ -0,0 +1,44 @@ +"""Fixer that addes parentheses where they are required + +This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``.""" + +# By Taek Joo Kim and Benjamin Peterson + +# Local imports +from .. import fixer_base +from ..fixer_util import LParen, RParen + +# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2] +class FixParen(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + atom< ('[' | '(') + (listmaker< any + comp_for< + 'for' NAME 'in' + target=testlist_safe< any (',' any)+ [','] + > + [any] + > + > + | + testlist_gexp< any + comp_for< + 'for' NAME 'in' + target=testlist_safe< any (',' any)+ [','] + > + [any] + > + >) + (']' | ')') > + """ + + def transform(self, node, results): + target = results["target"] + + lparen = LParen() + lparen.prefix = target.prefix + target.prefix = "" # Make it hug the parentheses + target.insert_child(0, lparen) + target.append_child(RParen()) diff --git a/lib3/2to3/lib2to3/fixes/fix_print.py b/lib3/2to3/lib2to3/fixes/fix_print.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_print.py @@ -0,0 +1,87 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. 
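# A hedged sketch of one of the fix_operator rewrites listed above, run
# through the refactoring machinery; names are invented and lib2to3 must be
# available.
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_operator"])
print(tool.refactor_string("result = operator.isCallable(f)\n", "<example>"))
# Expected result, roughly: result = hasattr(f, '__call__')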
+# Licensed to PSF under a Contributor Agreement. + +"""Fixer for print. + +Change: + 'print' into 'print()' + 'print ...' into 'print(...)' + 'print ... ,' into 'print(..., end=" ")' + 'print >>x, ...' into 'print(..., file=x)' + +No changes are applied if print_function is imported from __future__ + +""" + +# Local imports +from .. import patcomp +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, Comma, String, is_tuple + + +parend_expr = patcomp.compile_pattern( + """atom< '(' [atom|STRING|NAME] ')' >""" + ) + + +class FixPrint(fixer_base.BaseFix): + + BM_compatible = True + + PATTERN = """ + simple_stmt< any* bare='print' any* > | print_stmt + """ + + def transform(self, node, results): + assert results + + bare_print = results.get("bare") + + if bare_print: + # Special-case print all by itself + bare_print.replace(Call(Name("print"), [], + prefix=bare_print.prefix)) + return + assert node.children[0] == Name("print") + args = node.children[1:] + if len(args) == 1 and parend_expr.match(args[0]): + # We don't want to keep sticking parens around an + # already-parenthesised expression. + return + + sep = end = file = None + if args and args[-1] == Comma(): + args = args[:-1] + end = " " + if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, ">>"): + assert len(args) >= 2 + file = args[1].clone() + args = args[3:] # Strip a possible comma after the file expression + # Now synthesize a print(args, sep=..., end=..., file=...) node. + l_args = [arg.clone() for arg in args] + if l_args: + l_args[0].prefix = "" + if sep is not None or end is not None or file is not None: + if sep is not None: + self.add_kwarg(l_args, "sep", String(repr(sep))) + if end is not None: + self.add_kwarg(l_args, "end", String(repr(end))) + if file is not None: + self.add_kwarg(l_args, "file", file) + n_stmt = Call(Name("print"), l_args) + n_stmt.prefix = node.prefix + return n_stmt + + def add_kwarg(self, l_nodes, s_kwd, n_expr): + # XXX All this prefix-setting may lose comments (though rarely) + n_expr.prefix = "" + n_argument = pytree.Node(self.syms.argument, + (Name(s_kwd), + pytree.Leaf(token.EQUAL, "="), + n_expr)) + if l_nodes: + l_nodes.append(Comma()) + n_argument.prefix = " " + l_nodes.append(n_argument) diff --git a/lib3/2to3/lib2to3/fixes/fix_raise.py b/lib3/2to3/lib2to3/fixes/fix_raise.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_raise.py @@ -0,0 +1,90 @@ +"""Fixer for 'raise E, V, T' + +raise -> raise +raise E -> raise E +raise E, V -> raise E(V) +raise E, V, T -> raise E(V).with_traceback(T) +raise E, None, T -> raise E.with_traceback(T) + +raise (((E, E'), E''), E'''), V -> raise E(V) +raise "foo", V, T -> warns about string exceptions + + +CAVEATS: +1) "raise E, V" will be incorrectly translated if V is an exception + instance. The correct Python 3 idiom is + + raise E from V + + but since we can't detect instance-hood by syntax alone and since + any client code would have to be changed as well, we don't automate + this. +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. 
import fixer_base +from ..fixer_util import Name, Call, Attr, ArgList, is_tuple + +class FixRaise(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type == token.STRING: + msg = "Python 3 does not support string exceptions" + self.cannot_convert(node, msg) + return + + # Python 2 supports + # raise ((((E1, E2), E3), E4), E5), V + # as a synonym for + # raise E1, V + # Since Python 3 will not support this, we recurse down any tuple + # literals, always taking the first element. + if is_tuple(exc): + while is_tuple(exc): + # exc.children[1:-1] is the unparenthesized tuple + # exc.children[1].children[0] is the first element of the tuple + exc = exc.children[1].children[0].clone() + exc.prefix = " " + + if "val" not in results: + # One-argument raise + new = pytree.Node(syms.raise_stmt, [Name("raise"), exc]) + new.prefix = node.prefix + return new + + val = results["val"].clone() + if is_tuple(val): + args = [c.clone() for c in val.children[1:-1]] + else: + val.prefix = "" + args = [val] + + if "tb" in results: + tb = results["tb"].clone() + tb.prefix = "" + + e = exc + # If there's a traceback and None is passed as the value, then don't + # add a call, since the user probably just wants to add a + # traceback. See issue #9661. + if val.type != token.NAME or val.value != "None": + e = Call(exc, args) + with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])] + new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb) + new.prefix = node.prefix + return new + else: + return pytree.Node(syms.raise_stmt, + [Name("raise"), Call(exc, args)], + prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_raw_input.py b/lib3/2to3/lib2to3/fixes/fix_raw_input.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_raw_input.py @@ -0,0 +1,17 @@ +"""Fixer that changes raw_input(...) into input(...).""" +# Author: Andre Roberge + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixRawInput(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + power< name='raw_input' trailer< '(' [any] ')' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name("input", prefix=name.prefix)) diff --git a/lib3/2to3/lib2to3/fixes/fix_reduce.py b/lib3/2to3/lib2to3/fixes/fix_reduce.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_reduce.py @@ -0,0 +1,35 @@ +# Copyright 2008 Armin Ronacher. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for reduce(). + +Makes sure reduce() is imported from the functools module if reduce is +used in that module. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import touch_import + + + +class FixReduce(fixer_base.BaseFix): + + BM_compatible = True + order = "pre" + + PATTERN = """ + power< 'reduce' + trailer< '(' + arglist< ( + (not(argument) any ',' + not(argument + > + """ + + def transform(self, node, results): + touch_import('functools', 'reduce', node) diff --git a/lib3/2to3/lib2to3/fixes/fix_renames.py b/lib3/2to3/lib2to3/fixes/fix_renames.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_renames.py @@ -0,0 +1,70 @@ +"""Fix incompatible renames + +Fixes: + * sys.maxint -> sys.maxsize +""" +# Author: Christian Heimes +# based on Collin Winter's fix_import + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Name, attr_chain + +MAPPING = {"sys": {"maxint" : "maxsize"}, + } +LOOKUP = {} + +def alternates(members): + return "(" + "|".join(map(repr, members)) + ")" + + +def build_pattern(): + #bare = set() + for module, replace in list(MAPPING.items()): + for old_attr, new_attr in list(replace.items()): + LOOKUP[(module, old_attr)] = new_attr + #bare.add(module) + #bare.add(old_attr) + #yield """ + # import_name< 'import' (module=%r + # | dotted_as_names< any* module=%r any* >) > + # """ % (module, module) + yield """ + import_from< 'from' module_name=%r 'import' + ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) > + """ % (module, old_attr, old_attr) + yield """ + power< module_name=%r trailer< '.' attr_name=%r > any* > + """ % (module, old_attr) + #yield """bare_name=%s""" % alternates(bare) + + +class FixRenames(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "|".join(build_pattern()) + + order = "pre" # Pre-order tree traversal + + # Don't match the node if it's within another match + def match(self, node): + match = super(FixRenames, self).match + results = match(node) + if results: + if any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + #def start_tree(self, tree, filename): + # super(FixRenames, self).start_tree(tree, filename) + # self.replace = {} + + def transform(self, node, results): + mod_name = results.get("module_name") + attr_name = results.get("attr_name") + #bare_name = results.get("bare_name") + #import_mod = results.get("module") + + if mod_name and attr_name: + new_attr = str(LOOKUP[(mod_name.value, attr_name.value)]) + attr_name.replace(Name(new_attr, prefix=attr_name.prefix)) diff --git a/lib3/2to3/lib2to3/fixes/fix_repr.py b/lib3/2to3/lib2to3/fixes/fix_repr.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_repr.py @@ -0,0 +1,23 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that transforms `xyzzy` into repr(xyzzy).""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Call, Name, parenthesize + + +class FixRepr(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + atom < '`' expr=any '`' > + """ + + def transform(self, node, results): + expr = results["expr"].clone() + + if expr.type == self.syms.testlist1: + expr = parenthesize(expr) + return Call(Name("repr"), [expr], prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_set_literal.py b/lib3/2to3/lib2to3/fixes/fix_set_literal.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_set_literal.py @@ -0,0 +1,53 @@ +""" +Optional fixer to transform set() calls to set literals. 
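# A minimal sketch, assuming lib2to3 is importable, of the sys.maxint ->
# sys.maxsize rename that fix_renames above performs.
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_renames"])
print(tool.refactor_string("x = sys.maxint\n", "<example>"))
# Expected result, roughly: x = sys.maxsize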
+""" + +# Author: Benjamin Peterson + +from lib2to3 import fixer_base, pytree +from lib2to3.fixer_util import token, syms + + + +class FixSetLiteral(fixer_base.BaseFix): + + BM_compatible = True + explicit = True + + PATTERN = """power< 'set' trailer< '(' + (atom=atom< '[' (items=listmaker< any ((',' any)* [',']) > + | + single=any) ']' > + | + atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' > + ) + ')' > > + """ + + def transform(self, node, results): + single = results.get("single") + if single: + # Make a fake listmaker + fake = pytree.Node(syms.listmaker, [single.clone()]) + single.replace(fake) + items = fake + else: + items = results["items"] + + # Build the contents of the literal + literal = [pytree.Leaf(token.LBRACE, "{")] + literal.extend(n.clone() for n in items.children) + literal.append(pytree.Leaf(token.RBRACE, "}")) + # Set the prefix of the right brace to that of the ')' or ']' + literal[-1].prefix = items.next_sibling.prefix + maker = pytree.Node(syms.dictsetmaker, literal) + maker.prefix = node.prefix + + # If the original was a one tuple, we need to remove the extra comma. + if len(maker.children) == 4: + n = maker.children[2] + n.remove() + maker.children[-1].prefix = n.prefix + + # Finally, replace the set call with our shiny new literal. + return maker diff --git a/lib3/2to3/lib2to3/fixes/fix_standarderror.py b/lib3/2to3/lib2to3/fixes/fix_standarderror.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_standarderror.py @@ -0,0 +1,18 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for StandardError -> Exception.""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixStandarderror(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + 'StandardError' + """ + + def transform(self, node, results): + return Name("Exception", prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_sys_exc.py b/lib3/2to3/lib2to3/fixes/fix_sys_exc.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_sys_exc.py @@ -0,0 +1,30 @@ +"""Fixer for sys.exc_{type, value, traceback} + +sys.exc_type -> sys.exc_info()[0] +sys.exc_value -> sys.exc_info()[1] +sys.exc_traceback -> sys.exc_info()[2] +""" + +# By Jeff Balogh and Benjamin Peterson + +# Local imports +from .. import fixer_base +from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms + +class FixSysExc(fixer_base.BaseFix): + # This order matches the ordering of sys.exc_info(). + exc_info = ["exc_type", "exc_value", "exc_traceback"] + BM_compatible = True + PATTERN = """ + power< 'sys' trailer< dot='.' attribute=(%s) > > + """ % '|'.join("'%s'" % e for e in exc_info) + + def transform(self, node, results): + sys_attr = results["attribute"][0] + index = Number(self.exc_info.index(sys_attr.value)) + + call = Call(Name("exc_info"), prefix=sys_attr.prefix) + attr = Attr(Name("sys"), call) + attr[1].children[0].prefix = results["dot"].prefix + attr.append(Subscript(index)) + return Node(syms.power, attr, prefix=node.prefix) diff --git a/lib3/2to3/lib2to3/fixes/fix_throw.py b/lib3/2to3/lib2to3/fixes/fix_throw.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_throw.py @@ -0,0 +1,56 @@ +"""Fixer for generator.throw(E, V, T). 
+ +g.throw(E) -> g.throw(E) +g.throw(E, V) -> g.throw(E(V)) +g.throw(E, V, T) -> g.throw(E(V).with_traceback(T)) + +g.throw("foo"[, V[, T]]) will warn about string exceptions.""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, ArgList, Attr, is_tuple + +class FixThrow(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< any trailer< '.' 'throw' > + trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' > + > + | + power< any trailer< '.' 'throw' > trailer< '(' exc=any ')' > > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type is token.STRING: + self.cannot_convert(node, "Python 3 does not support string exceptions") + return + + # Leave "g.throw(E)" alone + val = results.get("val") + if val is None: + return + + val = val.clone() + if is_tuple(val): + args = [c.clone() for c in val.children[1:-1]] + else: + val.prefix = "" + args = [val] + + throw_args = results["args"] + + if "tb" in results: + tb = results["tb"].clone() + tb.prefix = "" + + e = Call(exc, args) + with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])] + throw_args.replace(pytree.Node(syms.power, with_tb)) + else: + throw_args.replace(Call(exc, args)) diff --git a/lib3/2to3/lib2to3/fixes/fix_tuple_params.py b/lib3/2to3/lib2to3/fixes/fix_tuple_params.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_tuple_params.py @@ -0,0 +1,175 @@ +"""Fixer for function definitions with tuple parameters. + +def func(((a, b), c), d): + ... + + -> + +def func(x, d): + ((a, b), c) = x + ... + +It will also support lambdas: + + lambda (x, y): x + y -> lambda t: t[0] + t[1] + + # The parens are a syntax error in Python 3 + lambda (x): x + y -> lambda x: x + y +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms + +def is_docstring(stmt): + return isinstance(stmt, pytree.Node) and \ + stmt.children[0].type == token.STRING + +class FixTupleParams(fixer_base.BaseFix): + run_order = 4 #use a lower order since lambda is part of other + #patterns + BM_compatible = True + + PATTERN = """ + funcdef< 'def' any parameters< '(' args=any ')' > + ['->' any] ':' suite=any+ > + | + lambda= + lambdef< 'lambda' args=vfpdef< '(' inner=any ')' > + ':' body=any + > + """ + + def transform(self, node, results): + if "lambda" in results: + return self.transform_lambda(node, results) + + new_lines = [] + suite = results["suite"] + args = results["args"] + # This crap is so "def foo(...): x = 5; y = 7" is handled correctly. + # TODO(cwinter): suite-cleanup + if suite[0].children[1].type == token.INDENT: + start = 2 + indent = suite[0].children[1].value + end = Newline() + else: + start = 0 + indent = "; " + end = pytree.Leaf(token.INDENT, "") + + # We need access to self for new_name(), and making this a method + # doesn't feel right. Closing over self and new_lines makes the + # code below cleaner. 
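# A hedged example of the lambda rewrite described in the fix_tuple_params
# docstring above, assuming lib2to3 is available; the generated parameter
# name (x_y) is the one produced by tuple_name()/new_name().
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_tuple_params"])
print(tool.refactor_string("f = lambda (x, y): x + y\n", "<example>"))
# Expected result, roughly: f = lambda x_y: x_y[0] + x_y[1]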
+ def handle_tuple(tuple_arg, add_prefix=False): + n = Name(self.new_name()) + arg = tuple_arg.clone() + arg.prefix = "" + stmt = Assign(arg, n.clone()) + if add_prefix: + n.prefix = " " + tuple_arg.replace(n) + new_lines.append(pytree.Node(syms.simple_stmt, + [stmt, end.clone()])) + + if args.type == syms.tfpdef: + handle_tuple(args) + elif args.type == syms.typedargslist: + for i, arg in enumerate(args.children): + if arg.type == syms.tfpdef: + # Without add_prefix, the emitted code is correct, + # just ugly. + handle_tuple(arg, add_prefix=(i > 0)) + + if not new_lines: + return + + # This isn't strictly necessary, but it plays nicely with other fixers. + # TODO(cwinter) get rid of this when children becomes a smart list + for line in new_lines: + line.parent = suite[0] + + # TODO(cwinter) suite-cleanup + after = start + if start == 0: + new_lines[0].prefix = " " + elif is_docstring(suite[0].children[start]): + new_lines[0].prefix = indent + after = start + 1 + + for line in new_lines: + line.parent = suite[0] + suite[0].children[after:after] = new_lines + for i in range(after+1, after+len(new_lines)+1): + suite[0].children[i].prefix = indent + suite[0].changed() + + def transform_lambda(self, node, results): + args = results["args"] + body = results["body"] + inner = simplify_args(results["inner"]) + + # Replace lambda ((((x)))): x with lambda x: x + if inner.type == token.NAME: + inner = inner.clone() + inner.prefix = " " + args.replace(inner) + return + + params = find_params(args) + to_index = map_to_index(params) + tup_name = self.new_name(tuple_name(params)) + + new_param = Name(tup_name, prefix=" ") + args.replace(new_param.clone()) + for n in body.post_order(): + if n.type == token.NAME and n.value in to_index: + subscripts = [c.clone() for c in to_index[n.value]] + new = pytree.Node(syms.power, + [new_param.clone()] + subscripts) + new.prefix = n.prefix + n.replace(new) + + +### Helper functions for transform_lambda() + +def simplify_args(node): + if node.type in (syms.vfplist, token.NAME): + return node + elif node.type == syms.vfpdef: + # These look like vfpdef< '(' x ')' > where x is NAME + # or another vfpdef instance (leading to recursion). + while node.type == syms.vfpdef: + node = node.children[1] + return node + raise RuntimeError("Received unexpected node %s" % node) + +def find_params(node): + if node.type == syms.vfpdef: + return find_params(node.children[1]) + elif node.type == token.NAME: + return node.value + return [find_params(c) for c in node.children if c.type != token.COMMA] + +def map_to_index(param_list, prefix=[], d=None): + if d is None: + d = {} + for i, obj in enumerate(param_list): + trailer = [Subscript(Number(str(i)))] + if isinstance(obj, list): + map_to_index(obj, trailer, d=d) + else: + d[obj] = prefix + trailer + return d + +def tuple_name(param_list): + l = [] + for obj in param_list: + if isinstance(obj, list): + l.append(tuple_name(obj)) + else: + l.append(obj) + return "_".join(l) diff --git a/lib3/2to3/lib2to3/fixes/fix_types.py b/lib3/2to3/lib2to3/fixes/fix_types.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_types.py @@ -0,0 +1,62 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for removing uses of the types module. + +These work for only the known names in the types module. The forms above +can include types. or not. ie, It is assumed the module is imported either as: + + import types + from types import ... 
# either * or specific types + +The import statements are not modified. + +There should be another fixer that handles at least the following constants: + + type([]) -> list + type(()) -> tuple + type('') -> str + +""" + +# Local imports +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name + +_TYPE_MAPPING = { + 'BooleanType' : 'bool', + 'BufferType' : 'memoryview', + 'ClassType' : 'type', + 'ComplexType' : 'complex', + 'DictType': 'dict', + 'DictionaryType' : 'dict', + 'EllipsisType' : 'type(Ellipsis)', + #'FileType' : 'io.IOBase', + 'FloatType': 'float', + 'IntType': 'int', + 'ListType': 'list', + 'LongType': 'int', + 'ObjectType' : 'object', + 'NoneType': 'type(None)', + 'NotImplementedType' : 'type(NotImplemented)', + 'SliceType' : 'slice', + 'StringType': 'bytes', # XXX ? + 'StringTypes' : 'str', # XXX ? + 'TupleType': 'tuple', + 'TypeType' : 'type', + 'UnicodeType': 'str', + 'XRangeType' : 'range', + } + +_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING] + +class FixTypes(fixer_base.BaseFix): + BM_compatible = True + PATTERN = '|'.join(_pats) + + def transform(self, node, results): + new_value = str(_TYPE_MAPPING.get(results["name"].value)) + if new_value: + return Name(new_value, prefix=node.prefix) + return None diff --git a/lib3/2to3/lib2to3/fixes/fix_unicode.py b/lib3/2to3/lib2to3/fixes/fix_unicode.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_unicode.py @@ -0,0 +1,25 @@ +"""Fixer that changes unicode to str, unichr to chr, and u"..." into "...". + +""" + +import re +from ..pgen2 import token +from .. import fixer_base + +_mapping = {"unichr" : "chr", "unicode" : "str"} +_literal_re = re.compile(r"[uU][rR]?[\'\"]") + +class FixUnicode(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING | 'unicode' | 'unichr'" + + def transform(self, node, results): + if node.type == token.NAME: + new = node.clone() + new.value = _mapping[node.value] + return new + elif node.type == token.STRING: + if _literal_re.match(node.value): + new = node.clone() + new.value = new.value[1:] + return new diff --git a/lib3/2to3/lib2to3/fixes/fix_urllib.py b/lib3/2to3/lib2to3/fixes/fix_urllib.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_urllib.py @@ -0,0 +1,197 @@ +"""Fix changes imports of urllib which are now incompatible. + This is rather similar to fix_imports, but because of the more + complex nature of the fixing for urllib, it has its own fixer. 
+""" +# Author: Nick Edds + +# Local imports +from lib2to3.fixes.fix_imports import alternates, FixImports +from lib2to3 import fixer_base +from lib2to3.fixer_util import (Name, Comma, FromImport, Newline, + find_indentation, Node, syms) + +MAPPING = {"urllib": [ + ("urllib.request", + ["URLopener", "FancyURLopener", "urlretrieve", + "_urlopener", "urlopen", "urlcleanup", + "pathname2url", "url2pathname"]), + ("urllib.parse", + ["quote", "quote_plus", "unquote", "unquote_plus", + "urlencode", "splitattr", "splithost", "splitnport", + "splitpasswd", "splitport", "splitquery", "splittag", + "splittype", "splituser", "splitvalue", ]), + ("urllib.error", + ["ContentTooShortError"])], + "urllib2" : [ + ("urllib.request", + ["urlopen", "install_opener", "build_opener", + "Request", "OpenerDirector", "BaseHandler", + "HTTPDefaultErrorHandler", "HTTPRedirectHandler", + "HTTPCookieProcessor", "ProxyHandler", + "HTTPPasswordMgr", + "HTTPPasswordMgrWithDefaultRealm", + "AbstractBasicAuthHandler", + "HTTPBasicAuthHandler", "ProxyBasicAuthHandler", + "AbstractDigestAuthHandler", + "HTTPDigestAuthHandler", "ProxyDigestAuthHandler", + "HTTPHandler", "HTTPSHandler", "FileHandler", + "FTPHandler", "CacheFTPHandler", + "UnknownHandler"]), + ("urllib.error", + ["URLError", "HTTPError"]), + ] +} + +# Duplicate the url parsing functions for urllib2. +MAPPING["urllib2"].append(MAPPING["urllib"][1]) + + +def build_pattern(): + bare = set() + for old_module, changes in list(MAPPING.items()): + for change in changes: + new_module, members = change + members = alternates(members) + yield """import_name< 'import' (module=%r + | dotted_as_names< any* module=%r any* >) > + """ % (old_module, old_module) + yield """import_from< 'from' mod_member=%r 'import' + ( member=%s | import_as_name< member=%s 'as' any > | + import_as_names< members=any* >) > + """ % (old_module, members, members) + yield """import_from< 'from' module_star=%r 'import' star='*' > + """ % old_module + yield """import_name< 'import' + dotted_as_name< module_as=%r 'as' any > > + """ % old_module + # bare_with_attr has a special significance for FixImports.match(). + yield """power< bare_with_attr=%r trailer< '.' member=%s > any* > + """ % (old_module, members) + + +class FixUrllib(FixImports): + + def build_pattern(self): + return "|".join(build_pattern()) + + def transform_import(self, node, results): + """Transform for the basic import case. Replaces the old + import name with a comma separated list of its + replacements. + """ + import_mod = results.get("module") + pref = import_mod.prefix + + names = [] + + # create a Node list of the replacement modules + for name in MAPPING[import_mod.value][:-1]: + names.extend([Name(name[0], prefix=pref), Comma()]) + names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref)) + import_mod.replace(names) + + def transform_member(self, node, results): + """Transform for imports of specific module elements. Replaces + the module to be imported from with the appropriate new + module. 
+ """ + mod_member = results.get("mod_member") + pref = mod_member.prefix + member = results.get("member") + + # Simple case with only a single member being imported + if member: + # this may be a list of length one, or just a node + if isinstance(member, list): + member = member[0] + new_name = None + for change in MAPPING[mod_member.value]: + if member.value in change[1]: + new_name = change[0] + break + if new_name: + mod_member.replace(Name(new_name, prefix=pref)) + else: + self.cannot_convert(node, "This is an invalid module element") + + # Multiple members being imported + else: + # a dictionary for replacements, order matters + modules = [] + mod_dict = {} + members = results["members"] + for member in members: + # we only care about the actual members + if member.type == syms.import_as_name: + as_name = member.children[2].value + member_name = member.children[0].value + else: + member_name = member.value + as_name = None + if member_name != ",": + for change in MAPPING[mod_member.value]: + if member_name in change[1]: + if change[0] not in mod_dict: + modules.append(change[0]) + mod_dict.setdefault(change[0], []).append(member) + + new_nodes = [] + indentation = find_indentation(node) + first = True + def handle_name(name, prefix): + if name.type == syms.import_as_name: + kids = [Name(name.children[0].value, prefix=prefix), + name.children[1].clone(), + name.children[2].clone()] + return [Node(syms.import_as_name, kids)] + return [Name(name.value, prefix=prefix)] + for module in modules: + elts = mod_dict[module] + names = [] + for elt in elts[:-1]: + names.extend(handle_name(elt, pref)) + names.append(Comma()) + names.extend(handle_name(elts[-1], pref)) + new = FromImport(module, names) + if not first or node.parent.prefix.endswith(indentation): + new.prefix = indentation + new_nodes.append(new) + first = False + if new_nodes: + nodes = [] + for new_node in new_nodes[:-1]: + nodes.extend([new_node, Newline()]) + nodes.append(new_nodes[-1]) + node.replace(nodes) + else: + self.cannot_convert(node, "All module elements are invalid") + + def transform_dot(self, node, results): + """Transform for calls to module members in code.""" + module_dot = results.get("bare_with_attr") + member = results.get("member") + new_name = None + if isinstance(member, list): + member = member[0] + for change in MAPPING[module_dot.value]: + if member.value in change[1]: + new_name = change[0] + break + if new_name: + module_dot.replace(Name(new_name, + prefix=module_dot.prefix)) + else: + self.cannot_convert(node, "This is an invalid module element") + + def transform(self, node, results): + if results.get("module"): + self.transform_import(node, results) + elif results.get("mod_member"): + self.transform_member(node, results) + elif results.get("bare_with_attr"): + self.transform_dot(node, results) + # Renaming and star imports are not supported for these modules. + elif results.get("module_star"): + self.cannot_convert(node, "Cannot handle star imports.") + elif results.get("module_as"): + self.cannot_convert(node, "This module is now multiple modules") diff --git a/lib3/2to3/lib2to3/fixes/fix_ws_comma.py b/lib3/2to3/lib2to3/fixes/fix_ws_comma.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_ws_comma.py @@ -0,0 +1,39 @@ +"""Fixer that changes 'a ,b' into 'a, b'. + +This also changes '{a :b}' into '{a: b}', but does not touch other +uses of colons. It does not touch other uses of whitespace. + +""" + +from .. import pytree +from ..pgen2 import token +from .. 
import fixer_base + +class FixWsComma(fixer_base.BaseFix): + + explicit = True # The user must ask for this fixers + + PATTERN = """ + any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]> + """ + + COMMA = pytree.Leaf(token.COMMA, ",") + COLON = pytree.Leaf(token.COLON, ":") + SEPS = (COMMA, COLON) + + def transform(self, node, results): + new = node.clone() + comma = False + for child in new.children: + if child in self.SEPS: + prefix = child.prefix + if prefix.isspace() and "\n" not in prefix: + child.prefix = "" + comma = True + else: + if comma: + prefix = child.prefix + if not prefix: + child.prefix = " " + comma = False + return new diff --git a/lib3/2to3/lib2to3/fixes/fix_xrange.py b/lib3/2to3/lib2to3/fixes/fix_xrange.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_xrange.py @@ -0,0 +1,73 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes xrange(...) into range(...).""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, Call, consuming_calls +from .. import patcomp + + +class FixXrange(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< + (name='range'|name='xrange') trailer< '(' args=any ')' > + rest=any* > + """ + + def start_tree(self, tree, filename): + super(FixXrange, self).start_tree(tree, filename) + self.transformed_xranges = set() + + def finish_tree(self, tree, filename): + self.transformed_xranges = None + + def transform(self, node, results): + name = results["name"] + if name.value == "xrange": + return self.transform_xrange(node, results) + elif name.value == "range": + return self.transform_range(node, results) + else: + raise ValueError(repr(name)) + + def transform_xrange(self, node, results): + name = results["name"] + name.replace(Name("range", prefix=name.prefix)) + # This prevents the new range call from being wrapped in a list later. + self.transformed_xranges.add(id(node)) + + def transform_range(self, node, results): + if (id(node) not in self.transformed_xranges and + not self.in_special_context(node)): + range_call = Call(Name("range"), [results["args"].clone()]) + # Encase the range call in list(). + list_call = Call(Name("list"), [range_call], + prefix=node.prefix) + # Put things that were after the range() call after the list call. + for n in results["rest"]: + list_call.append_child(n) + return list_call + + P1 = "power< func=NAME trailer< '(' node=any ')' > any* >" + p1 = patcomp.compile_pattern(P1) + + P2 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + | comparison< any 'in' node=any any*> + """ + p2 = patcomp.compile_pattern(P2) + + def in_special_context(self, node): + if node.parent is None: + return False + results = {} + if (node.parent.parent is not None and + self.p1.match(node.parent.parent, results) and + results["node"] is node): + # list(d.keys()) -> list(d.keys()), etc. + return results["func"].value in consuming_calls + # for ... in d.iterkeys() -> for ... in d.keys(), etc. + return self.p2.match(node.parent, results) and results["node"] is node diff --git a/lib3/2to3/lib2to3/fixes/fix_xreadlines.py b/lib3/2to3/lib2to3/fixes/fix_xreadlines.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_xreadlines.py @@ -0,0 +1,25 @@ +"""Fix "for x in f.xreadlines()" -> "for x in f". + +This fixer will also convert g(f.xreadlines) into g(f.__iter__).""" +# Author: Collin Winter + +# Local imports +from .. 
import fixer_base +from ..fixer_util import Name + + +class FixXreadlines(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > > + | + power< any+ trailer< '.' no_call='xreadlines' > > + """ + + def transform(self, node, results): + no_call = results.get("no_call") + + if no_call: + no_call.replace(Name("__iter__", prefix=no_call.prefix)) + else: + node.replace([x.clone() for x in results["call"]]) diff --git a/lib3/2to3/lib2to3/fixes/fix_zip.py b/lib3/2to3/lib2to3/fixes/fix_zip.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/fixes/fix_zip.py @@ -0,0 +1,35 @@ +""" +Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...) +unless there exists a 'from future_builtins import zip' statement in the +top-level namespace. + +We avoid the transformation if the zip() call is directly contained in +iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. +""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, Call, in_special_context + +class FixZip(fixer_base.ConditionalFix): + + BM_compatible = True + PATTERN = """ + power< 'zip' args=trailer< '(' [any] ')' > + > + """ + + skip_on = "future_builtins.zip" + + def transform(self, node, results): + if self.should_skip(node): + return + + if in_special_context(node): + return None + + new = node.clone() + new.prefix = "" + new = Call(Name("list"), [new]) + new.prefix = node.prefix + return new diff --git a/lib3/2to3/lib2to3/main.py b/lib3/2to3/lib2to3/main.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/main.py @@ -0,0 +1,182 @@ +""" +Main program for 2to3. +""" + + + +import sys +import os +import difflib +import logging +import shutil +import optparse + +from . import refactor + + +def diff_texts(a, b, filename): + """Return a unified diff of two strings.""" + a = a.splitlines() + b = b.splitlines() + return difflib.unified_diff(a, b, filename, filename, + "(original)", "(refactored)", + lineterm="") + + +class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): + """ + Prints output to stdout. 
+ """ + + def __init__(self, fixers, options, explicit, nobackups, show_diffs): + self.nobackups = nobackups + self.show_diffs = show_diffs + super(StdoutRefactoringTool, self).__init__(fixers, options, explicit) + + def log_error(self, msg, *args, **kwargs): + self.errors.append((msg, args, kwargs)) + self.logger.error(msg, *args, **kwargs) + + def write_file(self, new_text, filename, old_text, encoding): + if not self.nobackups: + # Make backup + backup = filename + ".bak" + if os.path.lexists(backup): + try: + os.remove(backup) + except os.error as err: + self.log_message("Can't remove backup %s", backup) + try: + os.rename(filename, backup) + except os.error as err: + self.log_message("Can't rename %s to %s", filename, backup) + # Actually write the new file + write = super(StdoutRefactoringTool, self).write_file + write(new_text, filename, old_text, encoding) + if not self.nobackups: + shutil.copymode(backup, filename) + + def print_output(self, old, new, filename, equal): + if equal: + self.log_message("No changes to %s", filename) + else: + self.log_message("Refactored %s", filename) + if self.show_diffs: + diff_lines = diff_texts(old, new, filename) + try: + if self.output_lock is not None: + with self.output_lock: + for line in diff_lines: + print(line) + sys.stdout.flush() + else: + for line in diff_lines: + print(line) + except UnicodeEncodeError: + warn("couldn't encode %s's diff for your terminal" % + (filename,)) + return + + +def warn(msg): + print("WARNING: %s" % (msg,), file=sys.stderr) + + +def main(fixer_pkg, args=None): + """Main program. + + Args: + fixer_pkg: the name of a package where the fixers are located. + args: optional; a list of command line arguments. If omitted, + sys.argv[1:] is used. + + Returns a suggested exit status (0, 1, 2). 
+ """ + # Set up option parser + parser = optparse.OptionParser(usage="2to3 [options] file|dir ...") + parser.add_option("-d", "--doctests_only", action="store_true", + help="Fix up doctests only") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a transformation from being run") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + parser.add_option("-p", "--print-function", action="store_true", + help="Modify the grammar so that print() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + help="Don't write backups for modified files") + + # Parse command line arguments + refactor_stdin = False + flags = {} + options, args = parser.parse_args(args) + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if options.list_fixes: + print("Available transformations for the -f/--fix option:") + for fixname in refactor.get_all_fix_names(fixer_pkg): + print(fixname) + if not args: + return 0 + if not args: + print("At least one file or directory argument required.", file=sys.stderr) + print("Use --help to show usage.", file=sys.stderr) + return 2 + if "-" in args: + refactor_stdin = True + if options.write: + print("Can't write to stdin.", file=sys.stderr) + return 2 + if options.print_function: + flags["print_function"] = True + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + + # Initialize the refactoring tool + avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg)) + unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == "all": + all_present = True + else: + explicit.add(fixer_pkg + ".fix_" + fix) + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + fixer_names = requested.difference(unwanted_fixes) + rt = StdoutRefactoringTool(sorted(fixer_names), flags, sorted(explicit), + options.nobackups, not options.no_diffs) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, options.doctests_only, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print("Sorry, -j isn't " \ + "supported on this platform.", file=sys.stderr) + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff --git a/lib3/2to3/lib2to3/patcomp.py b/lib3/2to3/lib2to3/patcomp.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/patcomp.py @@ -0,0 +1,204 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. 
+# Licensed to PSF under a Contributor Agreement. + +"""Pattern compiler. + +The grammer is taken from PatternGrammar.txt. + +The compiler compiles a pattern to a pytree.*Pattern instance. +""" + +__author__ = "Guido van Rossum " + +# Python imports +import os + +# Fairly local imports +from .pgen2 import driver, literals, token, tokenize, parse, grammar + +# Really local imports +from . import pytree +from . import pygram + +# The pattern grammar file +_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), + "PatternGrammar.txt") + + +class PatternSyntaxError(Exception): + pass + + +def tokenize_wrapper(input): + """Tokenizes a string suppressing significant whitespace.""" + skip = set((token.NEWLINE, token.INDENT, token.DEDENT)) + tokens = tokenize.generate_tokens(driver.generate_lines(input).__next__) + for quintuple in tokens: + type, value, start, end, line_text = quintuple + if type not in skip: + yield quintuple + + +class PatternCompiler(object): + + def __init__(self, grammar_file=_PATTERN_GRAMMAR_FILE): + """Initializer. + + Takes an optional alternative filename for the pattern grammar. + """ + self.grammar = driver.load_grammar(grammar_file) + self.syms = pygram.Symbols(self.grammar) + self.pygrammar = pygram.python_grammar + self.pysyms = pygram.python_symbols + self.driver = driver.Driver(self.grammar, convert=pattern_convert) + + def compile_pattern(self, input, debug=False, with_tree=False): + """Compiles a pattern string to a nested pytree.*Pattern object.""" + tokens = tokenize_wrapper(input) + try: + root = self.driver.parse_tokens(tokens, debug=debug) + except parse.ParseError as e: + raise PatternSyntaxError(str(e)) + if with_tree: + return self.compile_node(root), root + else: + return self.compile_node(root) + + def compile_node(self, node): + """Compiles a node, recursively. + + This is one big switch on the node type. + """ + # XXX Optimize certain Wildcard-containing-Wildcard patterns + # that can be merged + if node.type == self.syms.Matcher: + node = node.children[0] # Avoid unneeded recursion + + if node.type == self.syms.Alternatives: + # Skip the odd children since they are just '|' tokens + alts = [self.compile_node(ch) for ch in node.children[::2]] + if len(alts) == 1: + return alts[0] + p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1) + return p.optimize() + + if node.type == self.syms.Alternative: + units = [self.compile_node(ch) for ch in node.children] + if len(units) == 1: + return units[0] + p = pytree.WildcardPattern([units], min=1, max=1) + return p.optimize() + + if node.type == self.syms.NegatedUnit: + pattern = self.compile_basic(node.children[1:]) + p = pytree.NegatedPattern(pattern) + return p.optimize() + + assert node.type == self.syms.Unit + + name = None + nodes = node.children + if len(nodes) >= 3 and nodes[1].type == token.EQUAL: + name = nodes[0].value + nodes = nodes[2:] + repeat = None + if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater: + repeat = nodes[-1] + nodes = nodes[:-1] + + # Now we've reduced it to: STRING | NAME [Details] | (...) | [...] 
+ pattern = self.compile_basic(nodes, repeat) + + if repeat is not None: + assert repeat.type == self.syms.Repeater + children = repeat.children + child = children[0] + if child.type == token.STAR: + min = 0 + max = pytree.HUGE + elif child.type == token.PLUS: + min = 1 + max = pytree.HUGE + elif child.type == token.LBRACE: + assert children[-1].type == token.RBRACE + assert len(children) in (3, 5) + min = max = self.get_int(children[1]) + if len(children) == 5: + max = self.get_int(children[3]) + else: + assert False + if min != 1 or max != 1: + pattern = pattern.optimize() + pattern = pytree.WildcardPattern([[pattern]], min=min, max=max) + + if name is not None: + pattern.name = name + return pattern.optimize() + + def compile_basic(self, nodes, repeat=None): + # Compile STRING | NAME [Details] | (...) | [...] + assert len(nodes) >= 1 + node = nodes[0] + if node.type == token.STRING: + value = str(literals.evalString(node.value)) + return pytree.LeafPattern(_type_of_literal(value), value) + elif node.type == token.NAME: + value = node.value + if value.isupper(): + if value not in TOKEN_MAP: + raise PatternSyntaxError("Invalid token: %r" % value) + if nodes[1:]: + raise PatternSyntaxError("Can't have details for token") + return pytree.LeafPattern(TOKEN_MAP[value]) + else: + if value == "any": + type = None + elif not value.startswith("_"): + type = getattr(self.pysyms, value, None) + if type is None: + raise PatternSyntaxError("Invalid symbol: %r" % value) + if nodes[1:]: # Details present + content = [self.compile_node(nodes[1].children[1])] + else: + content = None + return pytree.NodePattern(type, content) + elif node.value == "(": + return self.compile_node(nodes[1]) + elif node.value == "[": + assert repeat is None + subpattern = self.compile_node(nodes[1]) + return pytree.WildcardPattern([[subpattern]], min=0, max=1) + assert False, node + + def get_int(self, node): + assert node.type == token.NUMBER + return int(node.value) + + +# Map named tokens to the type value for a LeafPattern +TOKEN_MAP = {"NAME": token.NAME, + "STRING": token.STRING, + "NUMBER": token.NUMBER, + "TOKEN": None} + + +def _type_of_literal(value): + if value[0].isalpha(): + return token.NAME + elif value in grammar.opmap: + return grammar.opmap[value] + else: + return None + + +def pattern_convert(grammar, raw_node_info): + """Converts raw node information to a Node or Leaf instance.""" + type, value, context, children = raw_node_info + if children or type in grammar.number2symbol: + return pytree.Node(type, children, context=context) + else: + return pytree.Leaf(type, value, context=context) + + +def compile_pattern(pattern): + return PatternCompiler().compile_pattern(pattern) diff --git a/lib3/2to3/lib2to3/pgen2/__init__.py b/lib3/2to3/lib2to3/pgen2/__init__.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pgen2/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""The pgen2 package.""" diff --git a/lib3/2to3/lib2to3/pgen2/conv.py b/lib3/2to3/lib2to3/pgen2/conv.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pgen2/conv.py @@ -0,0 +1,257 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Convert graminit.[ch] spit out by pgen to Python code. + +Pgen is the Python parser generator. It is useful to quickly create a +parser from a grammar file in Python's grammar notation. 
But I don't +want my parsers to be written in C (yet), so I'm translating the +parsing tables to Python data structures and writing a Python parse +engine. + +Note that the token numbers are constants determined by the standard +Python tokenizer. The standard token module defines these numbers and +their names (the names are not used much). The token numbers are +hardcoded into the Python tokenizer and into pgen. A Python +implementation of the Python tokenizer is also available, in the +standard tokenize module. + +On the other hand, symbol numbers (representing the grammar's +non-terminals) are assigned by pgen based on the actual grammar +input. + +Note: this module is pretty much obsolete; the pgen module generates +equivalent grammar tables directly from the Grammar.txt input file +without having to invoke the Python pgen C program. + +""" + +# Python imports +import re + +# Local imports +from pgen2 import grammar, token + + +class Converter(grammar.Grammar): + """Grammar subclass that reads classic pgen output files. + + The run() method reads the tables as produced by the pgen parser + generator, typically contained in two C files, graminit.h and + graminit.c. The other methods are for internal use only. + + See the base class for more documentation. + + """ + + def run(self, graminit_h, graminit_c): + """Load the grammar tables from the text files written by pgen.""" + self.parse_graminit_h(graminit_h) + self.parse_graminit_c(graminit_c) + self.finish_off() + + def parse_graminit_h(self, filename): + """Parse the .h file writen by pgen. (Internal) + + This file is a sequence of #define statements defining the + nonterminals of the grammar as numbers. We build two tables + mapping the numbers to names and back. + + """ + try: + f = open(filename) + except IOError as err: + print("Can't open %s: %s" % (filename, err)) + return False + self.symbol2number = {} + self.number2symbol = {} + lineno = 0 + for line in f: + lineno += 1 + mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line) + if not mo and line.strip(): + print("%s(%s): can't parse %s" % (filename, lineno, + line.strip())) + else: + symbol, number = mo.groups() + number = int(number) + assert symbol not in self.symbol2number + assert number not in self.number2symbol + self.symbol2number[symbol] = number + self.number2symbol[number] = symbol + return True + + def parse_graminit_c(self, filename): + """Parse the .c file writen by pgen. (Internal) + + The file looks as follows. The first two lines are always this: + + #include "pgenheaders.h" + #include "grammar.h" + + After that come four blocks: + + 1) one or more state definitions + 2) a table defining dfas + 3) a table defining labels + 4) a struct defining the grammar + + A state definition has the following form: + - one or more arc arrays, each of the form: + static arc arcs__[] = { + {, }, + ... + }; + - followed by a state array, of the form: + static state states_[] = { + {, arcs__}, + ... + }; + + """ + try: + f = open(filename) + except IOError as err: + print("Can't open %s: %s" % (filename, err)) + return False + # The code below essentially uses f's iterator-ness! 
+ lineno = 0 + + # Expect the two #include lines + lineno, line = lineno+1, next(f) + assert line == '#include "pgenheaders.h"\n', (lineno, line) + lineno, line = lineno+1, next(f) + assert line == '#include "grammar.h"\n', (lineno, line) + + # Parse the state definitions + lineno, line = lineno+1, next(f) + allarcs = {} + states = [] + while line.startswith("static arc "): + while line.startswith("static arc "): + mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$", + line) + assert mo, (lineno, line) + n, m, k = list(map(int, mo.groups())) + arcs = [] + for _ in range(k): + lineno, line = lineno+1, next(f) + mo = re.match(r"\s+{(\d+), (\d+)},$", line) + assert mo, (lineno, line) + i, j = list(map(int, mo.groups())) + arcs.append((i, j)) + lineno, line = lineno+1, next(f) + assert line == "};\n", (lineno, line) + allarcs[(n, m)] = arcs + lineno, line = lineno+1, next(f) + mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line) + assert mo, (lineno, line) + s, t = list(map(int, mo.groups())) + assert s == len(states), (lineno, line) + state = [] + for _ in range(t): + lineno, line = lineno+1, next(f) + mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line) + assert mo, (lineno, line) + k, n, m = list(map(int, mo.groups())) + arcs = allarcs[n, m] + assert k == len(arcs), (lineno, line) + state.append(arcs) + states.append(state) + lineno, line = lineno+1, next(f) + assert line == "};\n", (lineno, line) + lineno, line = lineno+1, next(f) + self.states = states + + # Parse the dfas + dfas = {} + mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line) + assert mo, (lineno, line) + ndfas = int(mo.group(1)) + for i in range(ndfas): + lineno, line = lineno+1, next(f) + mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$', + line) + assert mo, (lineno, line) + symbol = mo.group(2) + number, x, y, z = list(map(int, mo.group(1, 3, 4, 5))) + assert self.symbol2number[symbol] == number, (lineno, line) + assert self.number2symbol[number] == symbol, (lineno, line) + assert x == 0, (lineno, line) + state = states[z] + assert y == len(state), (lineno, line) + lineno, line = lineno+1, next(f) + mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line) + assert mo, (lineno, line) + first = {} + rawbitset = eval(mo.group(1)) + for i, c in enumerate(rawbitset): + byte = ord(c) + for j in range(8): + if byte & (1<= os.path.getmtime(b) diff --git a/lib3/2to3/lib2to3/pgen2/grammar.py b/lib3/2to3/lib2to3/pgen2/grammar.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pgen2/grammar.py @@ -0,0 +1,184 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""This module defines the data structures used to represent a grammar. + +These are a bit arcane because they are derived from the data +structures used by Python's 'pgen' parser generator. + +There's also a table here mapping operators to their names in the +token module; the Python tokenize module reports all operators as the +fallback token code OP, but the parser needs the actual token code. + +""" + +# Python imports +import pickle + +# Local imports +from . import token, tokenize + + +class Grammar(object): + """Pgen parsing tables tables conversion class. + + Once initialized, this class supplies the grammar tables for the + parsing engine implemented by parse.py. The parsing engine + accesses the instance variables directly. The class here does not + provide initialization of the tables; several subclasses exist to + do this (see the conv and pgen modules). 
+ + The load() method reads the tables from a pickle file, which is + much faster than the other ways offered by subclasses. The pickle + file is written by calling dump() (after loading the grammar + tables using a subclass). The report() method prints a readable + representation of the tables to stdout, for debugging. + + The instance variables are as follows: + + symbol2number -- a dict mapping symbol names to numbers. Symbol + numbers are always 256 or higher, to distinguish + them from token numbers, which are between 0 and + 255 (inclusive). + + number2symbol -- a dict mapping numbers to symbol names; + these two are each other's inverse. + + states -- a list of DFAs, where each DFA is a list of + states, each state is is a list of arcs, and each + arc is a (i, j) pair where i is a label and j is + a state number. The DFA number is the index into + this list. (This name is slightly confusing.) + Final states are represented by a special arc of + the form (0, j) where j is its own state number. + + dfas -- a dict mapping symbol numbers to (DFA, first) + pairs, where DFA is an item from the states list + above, and first is a set of tokens that can + begin this grammar rule (represented by a dict + whose values are always 1). + + labels -- a list of (x, y) pairs where x is either a token + number or a symbol number, and y is either None + or a string; the strings are keywords. The label + number is the index in this list; label numbers + are used to mark state transitions (arcs) in the + DFAs. + + start -- the number of the grammar's start symbol. + + keywords -- a dict mapping keyword strings to arc labels. + + tokens -- a dict mapping token numbers to arc labels. + + """ + + def __init__(self): + self.symbol2number = {} + self.number2symbol = {} + self.states = [] + self.dfas = {} + self.labels = [(0, "EMPTY")] + self.keywords = {} + self.tokens = {} + self.symbol2label = {} + self.start = 256 + + def dump(self, filename): + """Dump the grammar tables to a pickle file.""" + f = open(filename, "wb") + pickle.dump(self.__dict__, f, 2) + f.close() + + def load(self, filename): + """Load the grammar tables from a pickle file.""" + f = open(filename, "rb") + d = pickle.load(f) + f.close() + self.__dict__.update(d) + + def copy(self): + """ + Copy the grammar. + """ + new = self.__class__() + for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords", + "tokens", "symbol2label"): + setattr(new, dict_attr, getattr(self, dict_attr).copy()) + new.labels = self.labels[:] + new.states = self.states[:] + new.start = self.start + return new + + def report(self): + """Dump the grammar tables to standard output, for debugging.""" + from pprint import pprint + print("s2n") + pprint(self.symbol2number) + print("n2s") + pprint(self.number2symbol) + print("states") + pprint(self.states) + print("dfas") + pprint(self.dfas) + print("labels") + pprint(self.labels) + print("start", self.start) + + +# Map from operator to number (since tokenize doesn't do this) + +opmap_raw = """ +( LPAR +) RPAR +[ LSQB +] RSQB +: COLON +, COMMA +; SEMI ++ PLUS +- MINUS +* STAR +/ SLASH +| VBAR +& AMPER +< LESS +> GREATER += EQUAL +. 
DOT +% PERCENT +` BACKQUOTE +{ LBRACE +} RBRACE +@ AT +== EQEQUAL +!= NOTEQUAL +<> NOTEQUAL +<= LESSEQUAL +>= GREATEREQUAL +~ TILDE +^ CIRCUMFLEX +<< LEFTSHIFT +>> RIGHTSHIFT +** DOUBLESTAR ++= PLUSEQUAL +-= MINEQUAL +*= STAREQUAL +/= SLASHEQUAL +%= PERCENTEQUAL +&= AMPEREQUAL +|= VBAREQUAL +^= CIRCUMFLEXEQUAL +<<= LEFTSHIFTEQUAL +>>= RIGHTSHIFTEQUAL +**= DOUBLESTAREQUAL +// DOUBLESLASH +//= DOUBLESLASHEQUAL +-> RARROW +""" + +opmap = {} +for line in opmap_raw.splitlines(): + if line: + op, name = line.split() + opmap[op] = getattr(token, name) diff --git a/lib3/2to3/lib2to3/pgen2/literals.py b/lib3/2to3/lib2to3/pgen2/literals.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pgen2/literals.py @@ -0,0 +1,60 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Safely evaluate Python string literals without using eval().""" + +import re + +simple_escapes = {"a": "\a", + "b": "\b", + "f": "\f", + "n": "\n", + "r": "\r", + "t": "\t", + "v": "\v", + "'": "'", + '"': '"', + "\\": "\\"} + +def escape(m): + all, tail = m.group(0, 1) + assert all.startswith("\\") + esc = simple_escapes.get(tail) + if esc is not None: + return esc + if tail.startswith("x"): + hexes = tail[1:] + if len(hexes) < 2: + raise ValueError("invalid hex string escape ('\\%s')" % tail) + try: + i = int(hexes, 16) + except ValueError: + raise ValueError("invalid hex string escape ('\\%s')" % tail) + else: + try: + i = int(tail, 8) + except ValueError: + raise ValueError("invalid octal string escape ('\\%s')" % tail) + return chr(i) + +def evalString(s): + assert s.startswith("'") or s.startswith('"'), repr(s[:1]) + q = s[0] + if s[:3] == q*3: + q = q*3 + assert s.endswith(q), repr(s[-len(q):]) + assert len(s) >= 2*len(q) + s = s[len(q):-len(q)] + return re.sub(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})", escape, s) + +def test(): + for i in range(256): + c = chr(i) + s = repr(c) + e = evalString(s) + if e != c: + print(i, c, s, e) + + +if __name__ == "__main__": + test() diff --git a/lib3/2to3/lib2to3/pgen2/parse.py b/lib3/2to3/lib2to3/pgen2/parse.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pgen2/parse.py @@ -0,0 +1,201 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Parser engine for the grammar tables generated by pgen. + +The grammar table must be loaded first. + +See Parser/parser.c in the Python distribution for additional info on +how this parsing engine works. + +""" + +# Local imports +from . import token + +class ParseError(Exception): + """Exception to signal the parser is stuck.""" + + def __init__(self, msg, type, value, context): + Exception.__init__(self, "%s: type=%r, value=%r, context=%r" % + (msg, type, value, context)) + self.msg = msg + self.type = type + self.value = value + self.context = context + +class Parser(object): + """Parser engine. + + The proper usage sequence is: + + p = Parser(grammar, [converter]) # create instance + p.setup([start]) # prepare for parsing + : + if p.addtoken(...): # parse a token; may raise ParseError + break + root = p.rootnode # root of abstract syntax tree + + A Parser instance may be reused by calling setup() repeatedly. + + A Parser instance contains state pertaining to the current token + sequence, and should not be used concurrently by different threads + to parse separate token sequences. + + See driver.py for how to get input tokens by tokenizing a file or + string. 
+ + Parsing is complete when addtoken() returns True; the root of the + abstract syntax tree can then be retrieved from the rootnode + instance variable. When a syntax error occurs, addtoken() raises + the ParseError exception. There is no error recovery; the parser + cannot be used after a syntax error was reported (but it can be + reinitialized by calling setup()). + + """ + + def __init__(self, grammar, convert=None): + """Constructor. + + The grammar argument is a grammar.Grammar instance; see the + grammar module for more information. + + The parser is not ready yet for parsing; you must call the + setup() method to get it started. + + The optional convert argument is a function mapping concrete + syntax tree nodes to abstract syntax tree nodes. If not + given, no conversion is done and the syntax tree produced is + the concrete syntax tree. If given, it must be a function of + two arguments, the first being the grammar (a grammar.Grammar + instance), and the second being the concrete syntax tree node + to be converted. The syntax tree is converted from the bottom + up. + + A concrete syntax tree node is a (type, value, context, nodes) + tuple, where type is the node type (a token or symbol number), + value is None for symbols and a string for tokens, context is + None or an opaque value used for error reporting (typically a + (lineno, offset) pair), and nodes is a list of children for + symbols, and None for tokens. + + An abstract syntax tree node may be anything; this is entirely + up to the converter function. + + """ + self.grammar = grammar + self.convert = convert or (lambda grammar, node: node) + + def setup(self, start=None): + """Prepare for parsing. + + This *must* be called before starting to parse. + + The optional argument is an alternative start symbol; it + defaults to the grammar's start symbol. + + You can use a Parser instance to parse any number of programs; + each time you call setup() the parser is reset to an initial + state determined by the (implicit or explicit) start symbol. + + """ + if start is None: + start = self.grammar.start + # Each stack entry is a tuple: (dfa, state, node). + # A node is a tuple: (type, value, context, children), + # where children is a list of nodes or None, and context may be None. + newnode = (start, None, None, []) + stackentry = (self.grammar.dfas[start], 0, newnode) + self.stack = [stackentry] + self.rootnode = None + self.used_names = set() # Aliased to self.rootnode.used_names in pop() + + def addtoken(self, type, value, context): + """Add a token; return True iff this is the end of the program.""" + # Map from token to label + ilabel = self.classify(type, value, context) + # Loop until the token is shifted; may raise exceptions + while True: + dfa, state, node = self.stack[-1] + states, first = dfa + arcs = states[state] + # Look for a state with this label + for i, newstate in arcs: + t, v = self.grammar.labels[i] + if ilabel == i: + # Look it up in the list of labels + assert t < 256 + # Shift a token; we're done with it + self.shift(type, value, newstate, context) + # Pop while we are in an accept-only state + state = newstate + while states[state] == [(0, state)]: + self.pop() + if not self.stack: + # Done parsing! 
+ return True + dfa, state, node = self.stack[-1] + states, first = dfa + # Done with this token + return False + elif t >= 256: + # See if it's a symbol and if we're in its first set + itsdfa = self.grammar.dfas[t] + itsstates, itsfirst = itsdfa + if ilabel in itsfirst: + # Push a symbol + self.push(t, self.grammar.dfas[t], newstate, context) + break # To continue the outer while loop + else: + if (0, state) in arcs: + # An accepting state, pop it and try something else + self.pop() + if not self.stack: + # Done parsing, but another token is input + raise ParseError("too much input", + type, value, context) + else: + # No success finding a transition + raise ParseError("bad input", type, value, context) + + def classify(self, type, value, context): + """Turn a token into a label. (Internal)""" + if type == token.NAME: + # Keep a listing of all used names + self.used_names.add(value) + # Check for reserved words + ilabel = self.grammar.keywords.get(value) + if ilabel is not None: + return ilabel + ilabel = self.grammar.tokens.get(type) + if ilabel is None: + raise ParseError("bad token", type, value, context) + return ilabel + + def shift(self, type, value, newstate, context): + """Shift a token. (Internal)""" + dfa, state, node = self.stack[-1] + newnode = (type, value, context, None) + newnode = self.convert(self.grammar, newnode) + if newnode is not None: + node[-1].append(newnode) + self.stack[-1] = (dfa, newstate, node) + + def push(self, type, newdfa, newstate, context): + """Push a nonterminal. (Internal)""" + dfa, state, node = self.stack[-1] + newnode = (type, None, context, []) + self.stack[-1] = (dfa, newstate, node) + self.stack.append((newdfa, 0, newnode)) + + def pop(self): + """Pop a nonterminal. (Internal)""" + popdfa, popstate, popnode = self.stack.pop() + newnode = self.convert(self.grammar, popnode) + if newnode is not None: + if self.stack: + dfa, state, node = self.stack[-1] + node[-1].append(newnode) + else: + self.rootnode = newnode + self.rootnode.used_names = self.used_names diff --git a/lib3/2to3/lib2to3/pgen2/pgen.py b/lib3/2to3/lib2to3/pgen2/pgen.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pgen2/pgen.py @@ -0,0 +1,386 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# Pgen imports +from . 
import grammar, token, tokenize + +class PgenGrammar(grammar.Grammar): + pass + +class ParserGenerator(object): + + def __init__(self, filename, stream=None): + close_stream = None + if stream is None: + stream = open(filename) + close_stream = stream.close + self.filename = filename + self.stream = stream + self.generator = tokenize.generate_tokens(stream.readline) + self.gettoken() # Initialize lookahead + self.dfas, self.startsymbol = self.parse() + if close_stream is not None: + close_stream() + self.first = {} # map from symbol name to set of tokens + self.addfirstsets() + + def make_grammar(self): + c = PgenGrammar() + names = list(self.dfas.keys()) + names.sort() + names.remove(self.startsymbol) + names.insert(0, self.startsymbol) + for name in names: + i = 256 + len(c.symbol2number) + c.symbol2number[name] = i + c.number2symbol[i] = name + for name in names: + dfa = self.dfas[name] + states = [] + for state in dfa: + arcs = [] + for label, next in state.arcs.items(): + arcs.append((self.make_label(c, label), dfa.index(next))) + if state.isfinal: + arcs.append((0, dfa.index(state))) + states.append(arcs) + c.states.append(states) + c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name)) + c.start = c.symbol2number[self.startsymbol] + return c + + def make_first(self, c, name): + rawfirst = self.first[name] + first = {} + for label in rawfirst: + ilabel = self.make_label(c, label) + ##assert ilabel not in first # XXX failed on <> ... != + first[ilabel] = 1 + return first + + def make_label(self, c, label): + # XXX Maybe this should be a method on a subclass of converter? + ilabel = len(c.labels) + if label[0].isalpha(): + # Either a symbol name or a named token + if label in c.symbol2number: + # A symbol name (a non-terminal) + if label in c.symbol2label: + return c.symbol2label[label] + else: + c.labels.append((c.symbol2number[label], None)) + c.symbol2label[label] = ilabel + return ilabel + else: + # A named token (NAME, NUMBER, STRING) + itoken = getattr(token, label, None) + assert isinstance(itoken, int), label + assert itoken in token.tok_name, label + if itoken in c.tokens: + return c.tokens[itoken] + else: + c.labels.append((itoken, None)) + c.tokens[itoken] = ilabel + return ilabel + else: + # Either a keyword or an operator + assert label[0] in ('"', "'"), label + value = eval(label) + if value[0].isalpha(): + # A keyword + if value in c.keywords: + return c.keywords[value] + else: + c.labels.append((token.NAME, value)) + c.keywords[value] = ilabel + return ilabel + else: + # An operator (any non-numeric token) + itoken = grammar.opmap[value] # Fails if unknown token + if itoken in c.tokens: + return c.tokens[itoken] + else: + c.labels.append((itoken, None)) + c.tokens[itoken] = ilabel + return ilabel + + def addfirstsets(self): + names = list(self.dfas.keys()) + names.sort() + for name in names: + if name not in self.first: + self.calcfirst(name) + #print name, self.first[name].keys() + + def calcfirst(self, name): + dfa = self.dfas[name] + self.first[name] = None # dummy to detect left recursion + state = dfa[0] + totalset = {} + overlapcheck = {} + for label, next in state.arcs.items(): + if label in self.dfas: + if label in self.first: + fset = self.first[label] + if fset is None: + raise ValueError("recursion for rule %r" % name) + else: + self.calcfirst(label) + fset = self.first[label] + totalset.update(fset) + overlapcheck[label] = fset + else: + totalset[label] = 1 + overlapcheck[label] = {label: 1} + inverse = {} + for label, itsfirst in 
overlapcheck.items(): + for symbol in itsfirst: + if symbol in inverse: + raise ValueError("rule %s is ambiguous; %s is in the" + " first sets of %s as well as %s" % + (name, symbol, label, inverse[symbol])) + inverse[symbol] = label + self.first[name] = totalset + + def parse(self): + dfas = {} + startsymbol = None + # MSTART: (NEWLINE | RULE)* ENDMARKER + while self.type != token.ENDMARKER: + while self.type == token.NEWLINE: + self.gettoken() + # RULE: NAME ':' RHS NEWLINE + name = self.expect(token.NAME) + self.expect(token.OP, ":") + a, z = self.parse_rhs() + self.expect(token.NEWLINE) + #self.dump_nfa(name, a, z) + dfa = self.make_dfa(a, z) + #self.dump_dfa(name, dfa) + oldlen = len(dfa) + self.simplify_dfa(dfa) + newlen = len(dfa) + dfas[name] = dfa + #print name, oldlen, newlen + if startsymbol is None: + startsymbol = name + return dfas, startsymbol + + def make_dfa(self, start, finish): + # To turn an NFA into a DFA, we define the states of the DFA + # to correspond to *sets* of states of the NFA. Then do some + # state reduction. Let's represent sets as dicts with 1 for + # values. + assert isinstance(start, NFAState) + assert isinstance(finish, NFAState) + def closure(state): + base = {} + addclosure(state, base) + return base + def addclosure(state, base): + assert isinstance(state, NFAState) + if state in base: + return + base[state] = 1 + for label, next in state.arcs: + if label is None: + addclosure(next, base) + states = [DFAState(closure(start), finish)] + for state in states: # NB states grows while we're iterating + arcs = {} + for nfastate in state.nfaset: + for label, next in nfastate.arcs: + if label is not None: + addclosure(next, arcs.setdefault(label, {})) + for label, nfaset in arcs.items(): + for st in states: + if st.nfaset == nfaset: + break + else: + st = DFAState(nfaset, finish) + states.append(st) + state.addarc(st, label) + return states # List of DFAState instances; first one is start + + def dump_nfa(self, name, start, finish): + print("Dump of NFA for", name) + todo = [start] + for i, state in enumerate(todo): + print(" State", i, state is finish and "(final)" or "") + for label, next in state.arcs: + if next in todo: + j = todo.index(next) + else: + j = len(todo) + todo.append(next) + if label is None: + print(" -> %d" % j) + else: + print(" %s -> %d" % (label, j)) + + def dump_dfa(self, name, dfa): + print("Dump of DFA for", name) + for i, state in enumerate(dfa): + print(" State", i, state.isfinal and "(final)" or "") + for label, next in state.arcs.items(): + print(" %s -> %d" % (label, dfa.index(next))) + + def simplify_dfa(self, dfa): + # This is not theoretically optimal, but works well enough. + # Algorithm: repeatedly look for two states that have the same + # set of arcs (same labels pointing to the same nodes) and + # unify them, until things stop changing. 
+ + # dfa is a list of DFAState instances + changes = True + while changes: + changes = False + for i, state_i in enumerate(dfa): + for j in range(i+1, len(dfa)): + state_j = dfa[j] + if state_i == state_j: + #print " unify", i, j + del dfa[j] + for state in dfa: + state.unifystate(state_j, state_i) + changes = True + break + + def parse_rhs(self): + # RHS: ALT ('|' ALT)* + a, z = self.parse_alt() + if self.value != "|": + return a, z + else: + aa = NFAState() + zz = NFAState() + aa.addarc(a) + z.addarc(zz) + while self.value == "|": + self.gettoken() + a, z = self.parse_alt() + aa.addarc(a) + z.addarc(zz) + return aa, zz + + def parse_alt(self): + # ALT: ITEM+ + a, b = self.parse_item() + while (self.value in ("(", "[") or + self.type in (token.NAME, token.STRING)): + c, d = self.parse_item() + b.addarc(c) + b = d + return a, b + + def parse_item(self): + # ITEM: '[' RHS ']' | ATOM ['+' | '*'] + if self.value == "[": + self.gettoken() + a, z = self.parse_rhs() + self.expect(token.OP, "]") + a.addarc(z) + return a, z + else: + a, z = self.parse_atom() + value = self.value + if value not in ("+", "*"): + return a, z + self.gettoken() + z.addarc(a) + if value == "+": + return a, z + else: + return a, a + + def parse_atom(self): + # ATOM: '(' RHS ')' | NAME | STRING + if self.value == "(": + self.gettoken() + a, z = self.parse_rhs() + self.expect(token.OP, ")") + return a, z + elif self.type in (token.NAME, token.STRING): + a = NFAState() + z = NFAState() + a.addarc(z, self.value) + self.gettoken() + return a, z + else: + self.raise_error("expected (...) or NAME or STRING, got %s/%s", + self.type, self.value) + + def expect(self, type, value=None): + if self.type != type or (value is not None and self.value != value): + self.raise_error("expected %s/%s, got %s/%s", + type, value, self.type, self.value) + value = self.value + self.gettoken() + return value + + def gettoken(self): + tup = next(self.generator) + while tup[0] in (tokenize.COMMENT, tokenize.NL): + tup = next(self.generator) + self.type, self.value, self.begin, self.end, self.line = tup + #print token.tok_name[self.type], repr(self.value) + + def raise_error(self, msg, *args): + if args: + try: + msg = msg % args + except: + msg = " ".join([msg] + list(map(str, args))) + raise SyntaxError(msg, (self.filename, self.end[0], + self.end[1], self.line)) + +class NFAState(object): + + def __init__(self): + self.arcs = [] # list of (label, NFAState) pairs + + def addarc(self, next, label=None): + assert label is None or isinstance(label, str) + assert isinstance(next, NFAState) + self.arcs.append((label, next)) + +class DFAState(object): + + def __init__(self, nfaset, final): + assert isinstance(nfaset, dict) + assert isinstance(next(iter(nfaset)), NFAState) + assert isinstance(final, NFAState) + self.nfaset = nfaset + self.isfinal = final in nfaset + self.arcs = {} # map from label to DFAState + + def addarc(self, next, label): + assert isinstance(label, str) + assert label not in self.arcs + assert isinstance(next, DFAState) + self.arcs[label] = next + + def unifystate(self, old, new): + for label, next in self.arcs.items(): + if next is old: + self.arcs[label] = new + + def __eq__(self, other): + # Equality test -- ignore the nfaset instance variable + assert isinstance(other, DFAState) + if self.isfinal != other.isfinal: + return False + # Can't just return self.arcs == other.arcs, because that + # would invoke this method recursively, with cycles... 
+ if len(self.arcs) != len(other.arcs): + return False + for label, next in self.arcs.items(): + if next is not other.arcs.get(label): + return False + return True + + __hash__ = None # For Py3 compatibility. + +def generate_grammar(filename="Grammar.txt"): + p = ParserGenerator(filename) + return p.make_grammar() diff --git a/lib3/2to3/lib2to3/pgen2/token.py b/lib3/2to3/lib2to3/pgen2/token.py new file mode 100755 --- /dev/null +++ b/lib3/2to3/lib2to3/pgen2/token.py @@ -0,0 +1,82 @@ +#! /usr/bin/env python + +"""Token constants (from "token.h").""" + +# Taken from Python (r53757) and modified to include some tokens +# originally monkeypatched in by pgen2.tokenize + +#--start constants-- +ENDMARKER = 0 +NAME = 1 +NUMBER = 2 +STRING = 3 +NEWLINE = 4 +INDENT = 5 +DEDENT = 6 +LPAR = 7 +RPAR = 8 +LSQB = 9 +RSQB = 10 +COLON = 11 +COMMA = 12 +SEMI = 13 +PLUS = 14 +MINUS = 15 +STAR = 16 +SLASH = 17 +VBAR = 18 +AMPER = 19 +LESS = 20 +GREATER = 21 +EQUAL = 22 +DOT = 23 +PERCENT = 24 +BACKQUOTE = 25 +LBRACE = 26 +RBRACE = 27 +EQEQUAL = 28 +NOTEQUAL = 29 +LESSEQUAL = 30 +GREATEREQUAL = 31 +TILDE = 32 +CIRCUMFLEX = 33 +LEFTSHIFT = 34 +RIGHTSHIFT = 35 +DOUBLESTAR = 36 +PLUSEQUAL = 37 +MINEQUAL = 38 +STAREQUAL = 39 +SLASHEQUAL = 40 +PERCENTEQUAL = 41 +AMPEREQUAL = 42 +VBAREQUAL = 43 +CIRCUMFLEXEQUAL = 44 +LEFTSHIFTEQUAL = 45 +RIGHTSHIFTEQUAL = 46 +DOUBLESTAREQUAL = 47 +DOUBLESLASH = 48 +DOUBLESLASHEQUAL = 49 +AT = 50 +OP = 51 +COMMENT = 52 +NL = 53 +RARROW = 54 +ERRORTOKEN = 55 +N_TOKENS = 56 +NT_OFFSET = 256 +#--end constants-- + +tok_name = {} +for _name, _value in list(globals().items()): + if type(_value) is type(0): + tok_name[_value] = _name + + +def ISTERMINAL(x): + return x < NT_OFFSET + +def ISNONTERMINAL(x): + return x >= NT_OFFSET + +def ISEOF(x): + return x == ENDMARKER diff --git a/lib3/2to3/lib2to3/pgen2/tokenize.py b/lib3/2to3/lib2to3/pgen2/tokenize.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pgen2/tokenize.py @@ -0,0 +1,500 @@ +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation. +# All rights reserved. + +"""Tokenization help for Python programs. + +generate_tokens(readline) is a generator that breaks a stream of +text into Python tokens. It accepts a readline-like method which is called +repeatedly to get the next line of input (or "" for EOF). It generates +5-tuples with these members: + + the token type (see token.py) + the token (a string) + the starting (row, column) indices of the token (a 2-tuple of ints) + the ending (row, column) indices of the token (a 2-tuple of ints) + the original line (string) + +It is designed to match the working of the Python tokenizer exactly, except +that it produces COMMENT tokens for comments and gives type OP for all +operators + +Older entry points + tokenize_loop(readline, tokeneater) + tokenize(readline, tokeneater=printtoken) +are the same, except instead of generating tokens, tokeneater is a callback +function to which the 5 fields described above are passed as 5 arguments, +each time a new token is found.""" + +__author__ = 'Ka-Ping Yee ' +__credits__ = \ + 'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro' + +import string, re +from codecs import BOM_UTF8, lookup +from lib2to3.pgen2.token import * + +from . import token +__all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize", + "generate_tokens", "untokenize"] +del token + +try: + bytes +except NameError: + # Support bytes type in Python <= 2.5, so 2to3 turns itself into + # valid Python 3 code. 
+ bytes = str + +def group(*choices): return '(' + '|'.join(choices) + ')' +def any(*choices): return group(*choices) + '*' +def maybe(*choices): return group(*choices) + '?' + +Whitespace = r'[ \f\t]*' +Comment = r'#[^\r\n]*' +Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment) +Name = r'[a-zA-Z_]\w*' + +Binnumber = r'0[bB][01]*' +Hexnumber = r'0[xX][\da-fA-F]*[lL]?' +Octnumber = r'0[oO]?[0-7]*[lL]?' +Decnumber = r'[1-9]\d*[lL]?' +Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber) +Exponent = r'[eE][-+]?\d+' +Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent) +Expfloat = r'\d+' + Exponent +Floatnumber = group(Pointfloat, Expfloat) +Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]') +Number = group(Imagnumber, Floatnumber, Intnumber) + +# Tail end of ' string. +Single = r"[^'\\]*(?:\\.[^'\\]*)*'" +# Tail end of " string. +Double = r'[^"\\]*(?:\\.[^"\\]*)*"' +# Tail end of ''' string. +Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''" +# Tail end of """ string. +Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""' +Triple = group("[ubUB]?[rR]?'''", '[ubUB]?[rR]?"""') +# Single-line ' or " string. +String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'", + r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"') + +# Because of leftmost-then-longest match semantics, be sure to put the +# longest operators first (e.g., if = came before ==, == would get +# recognized as two instances of =). +Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=", + r"//=?", r"->", + r"[+\-*/%&|^=<>]=?", + r"~") + +Bracket = '[][(){}]' +Special = group(r'\r?\n', r'[:;.,`@]') +Funny = group(Operator, Bracket, Special) + +PlainToken = group(Number, Funny, String, Name) +Token = Ignore + PlainToken + +# First (or only) line of ' or " string. +ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" + + group("'", r'\\\r?\n'), + r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' + + group('"', r'\\\r?\n')) +PseudoExtras = group(r'\\\r?\n', Comment, Triple) +PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name) + +tokenprog, pseudoprog, single3prog, double3prog = list(map( + re.compile, (Token, PseudoToken, Single3, Double3))) +endprogs = {"'": re.compile(Single), '"': re.compile(Double), + "'''": single3prog, '"""': double3prog, + "r'''": single3prog, 'r"""': double3prog, + "u'''": single3prog, 'u"""': double3prog, + "b'''": single3prog, 'b"""': double3prog, + "ur'''": single3prog, 'ur"""': double3prog, + "br'''": single3prog, 'br"""': double3prog, + "R'''": single3prog, 'R"""': double3prog, + "U'''": single3prog, 'U"""': double3prog, + "B'''": single3prog, 'B"""': double3prog, + "uR'''": single3prog, 'uR"""': double3prog, + "Ur'''": single3prog, 'Ur"""': double3prog, + "UR'''": single3prog, 'UR"""': double3prog, + "bR'''": single3prog, 'bR"""': double3prog, + "Br'''": single3prog, 'Br"""': double3prog, + "BR'''": single3prog, 'BR"""': double3prog, + 'r': None, 'R': None, + 'u': None, 'U': None, + 'b': None, 'B': None} + +triple_quoted = {} +for t in ("'''", '"""', + "r'''", 'r"""', "R'''", 'R"""', + "u'''", 'u"""', "U'''", 'U"""', + "b'''", 'b"""', "B'''", 'B"""', + "ur'''", 'ur"""', "Ur'''", 'Ur"""', + "uR'''", 'uR"""', "UR'''", 'UR"""', + "br'''", 'br"""', "Br'''", 'Br"""', + "bR'''", 'bR"""', "BR'''", 'BR"""',): + triple_quoted[t] = t +single_quoted = {} +for t in ("'", '"', + "r'", 'r"', "R'", 'R"', + "u'", 'u"', "U'", 'U"', + "b'", 'b"', "B'", 'B"', + "ur'", 'ur"', "Ur'", 'Ur"', + "uR'", 'uR"', "UR'", 'UR"', + "br'", 'br"', "Br'", 'Br"', + "bR'", 'bR"', "BR'", 
'BR"', ): + single_quoted[t] = t + +tabsize = 8 + +class TokenError(Exception): pass + +class StopTokenizing(Exception): pass + +def printtoken(type, token, xxx_todo_changeme, xxx_todo_changeme1, line): # for testing + (srow, scol) = xxx_todo_changeme + (erow, ecol) = xxx_todo_changeme1 + print("%d,%d-%d,%d:\t%s\t%s" % \ + (srow, scol, erow, ecol, tok_name[type], repr(token))) + +def tokenize(readline, tokeneater=printtoken): + """ + The tokenize() function accepts two parameters: one representing the + input stream, and one providing an output mechanism for tokenize(). + + The first parameter, readline, must be a callable object which provides + the same interface as the readline() method of built-in file objects. + Each call to the function should return one line of input as a string. + + The second parameter, tokeneater, must also be a callable object. It is + called once for each token, with five arguments, corresponding to the + tuples generated by generate_tokens(). + """ + try: + tokenize_loop(readline, tokeneater) + except StopTokenizing: + pass + +# backwards compatible interface +def tokenize_loop(readline, tokeneater): + for token_info in generate_tokens(readline): + tokeneater(*token_info) + +class Untokenizer: + + def __init__(self): + self.tokens = [] + self.prev_row = 1 + self.prev_col = 0 + + def add_whitespace(self, start): + row, col = start + assert row <= self.prev_row + col_offset = col - self.prev_col + if col_offset: + self.tokens.append(" " * col_offset) + + def untokenize(self, iterable): + for t in iterable: + if len(t) == 2: + self.compat(t, iterable) + break + tok_type, token, start, end, line = t + self.add_whitespace(start) + self.tokens.append(token) + self.prev_row, self.prev_col = end + if tok_type in (NEWLINE, NL): + self.prev_row += 1 + self.prev_col = 0 + return "".join(self.tokens) + + def compat(self, token, iterable): + startline = False + indents = [] + toks_append = self.tokens.append + toknum, tokval = token + if toknum in (NAME, NUMBER): + tokval += ' ' + if toknum in (NEWLINE, NL): + startline = True + for tok in iterable: + toknum, tokval = tok[:2] + + if toknum in (NAME, NUMBER): + tokval += ' ' + + if toknum == INDENT: + indents.append(tokval) + continue + elif toknum == DEDENT: + indents.pop() + continue + elif toknum in (NEWLINE, NL): + startline = True + elif startline and indents: + toks_append(indents[-1]) + startline = False + toks_append(tokval) + +cookie_re = re.compile("coding[:=]\s*([-\w.]+)") + +def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + +def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argment, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read + in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, but + disagree, a SyntaxError will be raised. If the encoding cookie is an invalid + charset, raise a SyntaxError. 
Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return bytes() + + def find_cookie(line): + try: + line_string = line.decode('ascii') + except UnicodeDecodeError: + return None + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + raise SyntaxError("unknown encoding: " + encoding) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + raise SyntaxError('encoding problem: utf-8') + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +def untokenize(iterable): + """Transform tokens back into Python source code. + + Each element returned by the iterable must be a token sequence + with at least two elements, a token number and token value. If + only two tokens are passed, the resulting output is poor. + + Round-trip invariant for full input: + Untokenized source will match input source exactly + + Round-trip invariant for limited intput: + # Output text will tokenize the back to the input + t1 = [tok[:2] for tok in generate_tokens(f.readline)] + newcode = untokenize(t1) + readline = iter(newcode.splitlines(1)).next + t2 = [tok[:2] for tokin generate_tokens(readline)] + assert t1 == t2 + """ + ut = Untokenizer() + return ut.untokenize(iterable) + +def generate_tokens(readline): + """ + The generate_tokens() generator requires one argment, readline, which + must be a callable object which provides the same interface as the + readline() method of built-in file objects. Each call to the function + should return one line of input as a string. Alternately, readline + can be a callable function terminating with StopIteration: + readline = open(myfile).next # Example of alternate readline + + The generator produces 5-tuples with these members: the token type; the + token string; a 2-tuple (srow, scol) of ints specifying the row and + column where the token begins in the source; a 2-tuple (erow, ecol) of + ints specifying the row and column where the token ends in the source; + and the line on which the token was found. The line passed is the + logical line; continuation lines are included. 
+ """ + lnum = parenlev = continued = 0 + namechars, numchars = string.ascii_letters + '_', '0123456789' + contstr, needcont = '', 0 + contline = None + indents = [0] + + while 1: # loop over lines in stream + try: + line = readline() + except StopIteration: + line = '' + lnum = lnum + 1 + pos, max = 0, len(line) + + if contstr: # continued string + if not line: + raise TokenError("EOF in multi-line string", strstart) + endmatch = endprog.match(line) + if endmatch: + pos = end = endmatch.end(0) + yield (STRING, contstr + line[:end], + strstart, (lnum, end), contline + line) + contstr, needcont = '', 0 + contline = None + elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n': + yield (ERRORTOKEN, contstr + line, + strstart, (lnum, len(line)), contline) + contstr = '' + contline = None + continue + else: + contstr = contstr + line + contline = contline + line + continue + + elif parenlev == 0 and not continued: # new statement + if not line: break + column = 0 + while pos < max: # measure leading whitespace + if line[pos] == ' ': column = column + 1 + elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize + elif line[pos] == '\f': column = 0 + else: break + pos = pos + 1 + if pos == max: break + + if line[pos] in '#\r\n': # skip comments or blank lines + if line[pos] == '#': + comment_token = line[pos:].rstrip('\r\n') + nl_pos = pos + len(comment_token) + yield (COMMENT, comment_token, + (lnum, pos), (lnum, pos + len(comment_token)), line) + yield (NL, line[nl_pos:], + (lnum, nl_pos), (lnum, len(line)), line) + else: + yield ((NL, COMMENT)[line[pos] == '#'], line[pos:], + (lnum, pos), (lnum, len(line)), line) + continue + + if column > indents[-1]: # count indents or dedents + indents.append(column) + yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) + while column < indents[-1]: + if column not in indents: + raise IndentationError( + "unindent does not match any outer indentation level", + ("", lnum, pos, line)) + indents = indents[:-1] + yield (DEDENT, '', (lnum, pos), (lnum, pos), line) + + else: # continued statement + if not line: + raise TokenError("EOF in multi-line statement", (lnum, 0)) + continued = 0 + + while pos < max: + pseudomatch = pseudoprog.match(line, pos) + if pseudomatch: # scan for tokens + start, end = pseudomatch.span(1) + spos, epos, pos = (lnum, start), (lnum, end), end + token, initial = line[start:end], line[start] + + if initial in numchars or \ + (initial == '.' 
and token != '.'): # ordinary number + yield (NUMBER, token, spos, epos, line) + elif initial in '\r\n': + newline = NEWLINE + if parenlev > 0: + newline = NL + yield (newline, token, spos, epos, line) + elif initial == '#': + assert not token.endswith("\n") + yield (COMMENT, token, spos, epos, line) + elif token in triple_quoted: + endprog = endprogs[token] + endmatch = endprog.match(line, pos) + if endmatch: # all on one line + pos = endmatch.end(0) + token = line[start:pos] + yield (STRING, token, spos, (lnum, pos), line) + else: + strstart = (lnum, start) # multiple lines + contstr = line[start:] + contline = line + break + elif initial in single_quoted or \ + token[:2] in single_quoted or \ + token[:3] in single_quoted: + if token[-1] == '\n': # continued string + strstart = (lnum, start) + endprog = (endprogs[initial] or endprogs[token[1]] or + endprogs[token[2]]) + contstr, needcont = line[start:], 1 + contline = line + break + else: # ordinary string + yield (STRING, token, spos, epos, line) + elif initial in namechars: # ordinary name + yield (NAME, token, spos, epos, line) + elif initial == '\\': # continued stmt + # This yield is new; needed for better idempotency: + yield (NL, token, spos, (lnum, pos), line) + continued = 1 + else: + if initial in '([{': parenlev = parenlev + 1 + elif initial in ')]}': parenlev = parenlev - 1 + yield (OP, token, spos, epos, line) + else: + yield (ERRORTOKEN, line[pos], + (lnum, pos), (lnum, pos+1), line) + pos = pos + 1 + + for indent in indents[1:]: # pop remaining indent levels + yield (DEDENT, '', (lnum, 0), (lnum, 0), '') + yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '') + +if __name__ == '__main__': # testing + import sys + if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline) + else: tokenize(sys.stdin.readline) diff --git a/lib3/2to3/lib2to3/pygram.py b/lib3/2to3/lib2to3/pygram.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pygram.py @@ -0,0 +1,40 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Export the Python grammar and symbols.""" + +# Python imports +import os + +# Local imports +from .pgen2 import token +from .pgen2 import driver +from . import pytree + +# The grammar file +_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt") +_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), + "PatternGrammar.txt") + + +class Symbols(object): + + def __init__(self, grammar): + """Initializer. + + Creates an attribute for each grammar symbol (nonterminal), + whose value is the symbol's type (an int >= 256). + """ + for name, symbol in grammar.symbol2number.items(): + setattr(self, name, symbol) + + +python_grammar = driver.load_grammar(_GRAMMAR_FILE) + +python_symbols = Symbols(python_grammar) + +python_grammar_no_print_statement = python_grammar.copy() +del python_grammar_no_print_statement.keywords["print"] + +pattern_grammar = driver.load_grammar(_PATTERN_GRAMMAR_FILE) +pattern_symbols = Symbols(pattern_grammar) diff --git a/lib3/2to3/lib2to3/pytree.py b/lib3/2to3/lib2to3/pytree.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/pytree.py @@ -0,0 +1,884 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +""" +Python parse tree definitions. + +This is a very concrete parse tree; we need to keep every token and +even the comments and whitespace between tokens. + +There's also a pattern matching implementation here. 
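A minimal sketch of how the grammar objects exported by pygram.py are usually combined with the pgen2 driver and pytree.convert to parse source text. The call pattern below is an assumption based on how lib2to3 is typically driven, and it presumes the whole lib2to3 package from this import batch is importable:

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    # Build a parser driver around the exported grammar; convert raw
    # parser output into pytree Nodes/Leaves.
    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("x = 1\n")

    # str() of a parse tree reproduces the input, whitespace included.
    assert str(tree) == "x = 1\n"
    print(pytree.type_repr(tree.type))  # symbol name, e.g. 'file_input'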
+""" + +__author__ = "Guido van Rossum " + +import sys +import warnings +from io import StringIO + +HUGE = 0x7FFFFFFF # maximum repeat count, default max + +_type_reprs = {} +def type_repr(type_num): + global _type_reprs + if not _type_reprs: + from .pygram import python_symbols + # printing tokens is possible but not as useful + # from .pgen2 import token // token.__dict__.items(): + for name, val in list(python_symbols.__dict__.items()): + if type(val) == int: _type_reprs[val] = name + return _type_reprs.setdefault(type_num, type_num) + +class Base(object): + + """ + Abstract base class for Node and Leaf. + + This provides some default functionality and boilerplate using the + template pattern. + + A node may be a subnode of at most one parent. + """ + + # Default values for instance variables + type = None # int: token number (< 256) or symbol number (>= 256) + parent = None # Parent node pointer, or None + children = () # Tuple of subnodes + was_changed = False + was_checked = False + + def __new__(cls, *args, **kwds): + """Constructor that prevents Base from being instantiated.""" + assert cls is not Base, "Cannot instantiate Base" + return object.__new__(cls) + + def __eq__(self, other): + """ + Compare two nodes for equality. + + This calls the method _eq(). + """ + if self.__class__ is not other.__class__: + return NotImplemented + return self._eq(other) + + __hash__ = None # For Py3 compatibility. + + def __ne__(self, other): + """ + Compare two nodes for inequality. + + This calls the method _eq(). + """ + if self.__class__ is not other.__class__: + return NotImplemented + return not self._eq(other) + + def _eq(self, other): + """ + Compare two nodes for equality. + + This is called by __eq__ and __ne__. It is only called if the two nodes + have the same type. This must be implemented by the concrete subclass. + Nodes should be considered equal if they have the same structure, + ignoring the prefix string and other context information. + """ + raise NotImplementedError + + def clone(self): + """ + Return a cloned (deep) copy of self. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def post_order(self): + """ + Return a post-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def pre_order(self): + """ + Return a pre-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def set_prefix(self, prefix): + """ + Set the prefix for the node (see Leaf class). + + DEPRECATED; use the prefix property directly. + """ + warnings.warn("set_prefix() is deprecated; use the prefix property", + DeprecationWarning, stacklevel=2) + self.prefix = prefix + + def get_prefix(self): + """ + Return the prefix for the node (see Leaf class). + + DEPRECATED; use the prefix property directly. 
+ """ + warnings.warn("get_prefix() is deprecated; use the prefix property", + DeprecationWarning, stacklevel=2) + return self.prefix + + def replace(self, new): + """Replace this node with a new one in the parent.""" + assert self.parent is not None, str(self) + assert new is not None + if not isinstance(new, list): + new = [new] + l_children = [] + found = False + for ch in self.parent.children: + if ch is self: + assert not found, (self.parent.children, self, new) + if new is not None: + l_children.extend(new) + found = True + else: + l_children.append(ch) + assert found, (self.children, self, new) + self.parent.changed() + self.parent.children = l_children + for x in new: + x.parent = self.parent + self.parent = None + + def get_lineno(self): + """Return the line number which generated the invocant node.""" + node = self + while not isinstance(node, Leaf): + if not node.children: + return + node = node.children[0] + return node.lineno + + def changed(self): + if self.parent: + self.parent.changed() + self.was_changed = True + + def remove(self): + """ + Remove the node from the tree. Returns the position of the node in its + parent's children before it was removed. + """ + if self.parent: + for i, node in enumerate(self.parent.children): + if node is self: + self.parent.changed() + del self.parent.children[i] + self.parent = None + return i + + @property + def next_sibling(self): + """ + The node immediately following the invocant in their parent's children + list. If the invocant does not have a next sibling, it is None + """ + if self.parent is None: + return None + + # Can't use index(); we need to test by identity + for i, child in enumerate(self.parent.children): + if child is self: + try: + return self.parent.children[i+1] + except IndexError: + return None + + @property + def prev_sibling(self): + """ + The node immediately preceding the invocant in their parent's children + list. If the invocant does not have a previous sibling, it is None. + """ + if self.parent is None: + return None + + # Can't use index(); we need to test by identity + for i, child in enumerate(self.parent.children): + if child is self: + if i == 0: + return None + return self.parent.children[i-1] + + def leaves(self): + for child in self.children: + for x in child.leaves(): + yield x + + def depth(self): + if self.parent is None: + return 0 + return 1 + self.parent.depth() + + def get_suffix(self): + """ + Return the string immediately following the invocant node. This is + effectively equivalent to node.next_sibling.prefix + """ + next_sib = self.next_sibling + if next_sib is None: + return "" + return next_sib.prefix + + if sys.version_info < (3, 0): + def __str__(self): + return str(self).encode("ascii") + +class Node(Base): + + """Concrete implementation for interior nodes.""" + + def __init__(self,type, children, + context=None, + prefix=None, + fixers_applied=None): + """ + Initializer. + + Takes a type constant (a symbol number >= 256), a sequence of + child nodes, and an optional context keyword argument. + + As a side effect, the parent pointers of the children are updated. 
+ """ + assert type >= 256, type + self.type = type + self.children = list(children) + for ch in self.children: + assert ch.parent is None, repr(ch) + ch.parent = self + if prefix is not None: + self.prefix = prefix + if fixers_applied: + self.fixers_applied = fixers_applied[:] + else: + self.fixers_applied = None + + def __repr__(self): + """Return a canonical string representation.""" + return "%s(%s, %r)" % (self.__class__.__name__, + type_repr(self.type), + self.children) + + def __unicode__(self): + """ + Return a pretty string representation. + + This reproduces the input source exactly. + """ + return "".join(map(str, self.children)) + + if sys.version_info > (3, 0): + __str__ = __unicode__ + + def _eq(self, other): + """Compare two nodes for equality.""" + return (self.type, self.children) == (other.type, other.children) + + def clone(self): + """Return a cloned (deep) copy of self.""" + return Node(self.type, [ch.clone() for ch in self.children], + fixers_applied=self.fixers_applied) + + def post_order(self): + """Return a post-order iterator for the tree.""" + for child in self.children: + for node in child.post_order(): + yield node + yield self + + def pre_order(self): + """Return a pre-order iterator for the tree.""" + yield self + for child in self.children: + for node in child.pre_order(): + yield node + + def _prefix_getter(self): + """ + The whitespace and comments preceding this node in the input. + """ + if not self.children: + return "" + return self.children[0].prefix + + def _prefix_setter(self, prefix): + if self.children: + self.children[0].prefix = prefix + + prefix = property(_prefix_getter, _prefix_setter) + + def set_child(self, i, child): + """ + Equivalent to 'node.children[i] = child'. This method also sets the + child's parent attribute appropriately. + """ + child.parent = self + self.children[i].parent = None + self.children[i] = child + self.changed() + + def insert_child(self, i, child): + """ + Equivalent to 'node.children.insert(i, child)'. This method also sets + the child's parent attribute appropriately. + """ + child.parent = self + self.children.insert(i, child) + self.changed() + + def append_child(self, child): + """ + Equivalent to 'node.children.append(child)'. This method also sets the + child's parent attribute appropriately. + """ + child.parent = self + self.children.append(child) + self.changed() + + +class Leaf(Base): + + """Concrete implementation for leaf nodes.""" + + # Default values for instance variables + _prefix = "" # Whitespace and comments preceding this token in the input + lineno = 0 # Line where this token starts in the input + column = 0 # Column where this token tarts in the input + + def __init__(self, type, value, + context=None, + prefix=None, + fixers_applied=[]): + """ + Initializer. + + Takes a type constant (a token number < 256), a string value, and an + optional context keyword argument. + """ + assert 0 <= type < 256, type + if context is not None: + self._prefix, (self.lineno, self.column) = context + self.type = type + self.value = value + if prefix is not None: + self._prefix = prefix + self.fixers_applied = fixers_applied[:] + + def __repr__(self): + """Return a canonical string representation.""" + return "%s(%r, %r)" % (self.__class__.__name__, + self.type, + self.value) + + def __unicode__(self): + """ + Return a pretty string representation. + + This reproduces the input source exactly. 
+ """ + return self.prefix + str(self.value) + + if sys.version_info > (3, 0): + __str__ = __unicode__ + + def _eq(self, other): + """Compare two nodes for equality.""" + return (self.type, self.value) == (other.type, other.value) + + def clone(self): + """Return a cloned (deep) copy of self.""" + return Leaf(self.type, self.value, + (self.prefix, (self.lineno, self.column)), + fixers_applied=self.fixers_applied) + + def leaves(self): + yield self + + def post_order(self): + """Return a post-order iterator for the tree.""" + yield self + + def pre_order(self): + """Return a pre-order iterator for the tree.""" + yield self + + def _prefix_getter(self): + """ + The whitespace and comments preceding this token in the input. + """ + return self._prefix + + def _prefix_setter(self, prefix): + self.changed() + self._prefix = prefix + + prefix = property(_prefix_getter, _prefix_setter) + +def convert(gr, raw_node): + """ + Convert raw node information to a Node or Leaf instance. + + This is passed to the parser driver which calls it whenever a reduction of a + grammar rule produces a new complete node, so that the tree is build + strictly bottom-up. + """ + type, value, context, children = raw_node + if children or type in gr.number2symbol: + # If there's exactly one child, return that child instead of + # creating a new node. + if len(children) == 1: + return children[0] + return Node(type, children, context=context) + else: + return Leaf(type, value, context=context) + + +class BasePattern(object): + + """ + A pattern is a tree matching pattern. + + It looks for a specific node type (token or symbol), and + optionally for a specific content. + + This is an abstract base class. There are three concrete + subclasses: + + - LeafPattern matches a single leaf node; + - NodePattern matches a single node (usually non-leaf); + - WildcardPattern matches a sequence of nodes of variable length. + """ + + # Defaults for instance variables + type = None # Node type (token if < 256, symbol if >= 256) + content = None # Optional content matching pattern + name = None # Optional name used to store match in results dict + + def __new__(cls, *args, **kwds): + """Constructor that prevents BasePattern from being instantiated.""" + assert cls is not BasePattern, "Cannot instantiate BasePattern" + return object.__new__(cls) + + def __repr__(self): + args = [type_repr(self.type), self.content, self.name] + while args and args[-1] is None: + del args[-1] + return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args))) + + def optimize(self): + """ + A subclass can define this as a hook for optimizations. + + Returns either self or another node with the same effect. + """ + return self + + def match(self, node, results=None): + """ + Does this pattern exactly match a node? + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + Default implementation for non-wildcard patterns. + """ + if self.type is not None and node.type != self.type: + return False + if self.content is not None: + r = None + if results is not None: + r = {} + if not self._submatch(node, r): + return False + if r: + results.update(r) + if results is not None and self.name: + results[self.name] = node + return True + + def match_seq(self, nodes, results=None): + """ + Does this pattern exactly match a sequence of nodes? + + Default implementation for non-wildcard patterns. 
+ """ + if len(nodes) != 1: + return False + return self.match(nodes[0], results) + + def generate_matches(self, nodes): + """ + Generator yielding all matches for this pattern. + + Default implementation for non-wildcard patterns. + """ + r = {} + if nodes and self.match(nodes[0], r): + yield 1, r + + +class LeafPattern(BasePattern): + + def __init__(self, type=None, content=None, name=None): + """ + Initializer. Takes optional type, content, and name. + + The type, if given must be a token type (< 256). If not given, + this matches any *leaf* node; the content may still be required. + + The content, if given, must be a string. + + If a name is given, the matching node is stored in the results + dict under that key. + """ + if type is not None: + assert 0 <= type < 256, type + if content is not None: + assert isinstance(content, str), repr(content) + self.type = type + self.content = content + self.name = name + + def match(self, node, results=None): + """Override match() to insist on a leaf node.""" + if not isinstance(node, Leaf): + return False + return BasePattern.match(self, node, results) + + def _submatch(self, node, results=None): + """ + Match the pattern's content to the node's children. + + This assumes the node type matches and self.content is not None. + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + When returning False, the results dict may still be updated. + """ + return self.content == node.value + + +class NodePattern(BasePattern): + + wildcards = False + + def __init__(self, type=None, content=None, name=None): + """ + Initializer. Takes optional type, content, and name. + + The type, if given, must be a symbol type (>= 256). If the + type is None this matches *any* single node (leaf or not), + except if content is not None, in which it only matches + non-leaf nodes that also match the content pattern. + + The content, if not None, must be a sequence of Patterns that + must match the node's children exactly. If the content is + given, the type must not be None. + + If a name is given, the matching node is stored in the results + dict under that key. + """ + if type is not None: + assert type >= 256, type + if content is not None: + assert not isinstance(content, str), repr(content) + content = list(content) + for i, item in enumerate(content): + assert isinstance(item, BasePattern), (i, item) + if isinstance(item, WildcardPattern): + self.wildcards = True + self.type = type + self.content = content + self.name = name + + def _submatch(self, node, results=None): + """ + Match the pattern's content to the node's children. + + This assumes the node type matches and self.content is not None. + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + When returning False, the results dict may still be updated. + """ + if self.wildcards: + for c, r in generate_matches(self.content, node.children): + if c == len(node.children): + if results is not None: + results.update(r) + return True + return False + if len(self.content) != len(node.children): + return False + for subpattern, child in zip(self.content, node.children): + if not subpattern.match(child, results): + return False + return True + + +class WildcardPattern(BasePattern): + + """ + A wildcard pattern can match zero or more nodes. 
+ + This has all the flexibility needed to implement patterns like: + + .* .+ .? .{m,n} + (a b c | d e | f) + (...)* (...)+ (...)? (...){m,n} + + except it always uses non-greedy matching. + """ + + def __init__(self, content=None, min=0, max=HUGE, name=None): + """ + Initializer. + + Args: + content: optional sequence of subsequences of patterns; + if absent, matches one node; + if present, each subsequence is an alternative [*] + min: optional minimum number of times to match, default 0 + max: optional maximum number of times to match, default HUGE + name: optional name assigned to this match + + [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is + equivalent to (a b c | d e | f g h); if content is None, + this is equivalent to '.' in regular expression terms. + The min and max parameters work as follows: + min=0, max=maxint: .* + min=1, max=maxint: .+ + min=0, max=1: .? + min=1, max=1: . + If content is not None, replace the dot with the parenthesized + list of alternatives, e.g. (a b c | d e | f g h)* + """ + assert 0 <= min <= max <= HUGE, (min, max) + if content is not None: + content = tuple(map(tuple, content)) # Protect against alterations + # Check sanity of alternatives + assert len(content), repr(content) # Can't have zero alternatives + for alt in content: + assert len(alt), repr(alt) # Can't have empty alternatives + self.content = content + self.min = min + self.max = max + self.name = name + + def optimize(self): + """Optimize certain stacked wildcard patterns.""" + subpattern = None + if (self.content is not None and + len(self.content) == 1 and len(self.content[0]) == 1): + subpattern = self.content[0][0] + if self.min == 1 and self.max == 1: + if self.content is None: + return NodePattern(name=self.name) + if subpattern is not None and self.name == subpattern.name: + return subpattern.optimize() + if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and + subpattern.min <= 1 and self.name == subpattern.name): + return WildcardPattern(subpattern.content, + self.min*subpattern.min, + self.max*subpattern.max, + subpattern.name) + return self + + def match(self, node, results=None): + """Does this pattern exactly match a node?""" + return self.match_seq([node], results) + + def match_seq(self, nodes, results=None): + """Does this pattern exactly match a sequence of nodes?""" + for c, r in self.generate_matches(nodes): + if c == len(nodes): + if results is not None: + results.update(r) + if self.name: + results[self.name] = list(nodes) + return True + return False + + def generate_matches(self, nodes): + """ + Generator yielding matches for a sequence of nodes. + + Args: + nodes: sequence of nodes + + Yields: + (count, results) tuples where: + count: the match comprises nodes[:count]; + results: dict containing named submatches. + """ + if self.content is None: + # Shortcut for special case (see __init__.__doc__) + for count in range(self.min, 1 + min(len(nodes), self.max)): + r = {} + if self.name: + r[self.name] = nodes[:count] + yield count, r + elif self.name == "bare_name": + yield self._bare_name_matches(nodes) + else: + # The reason for this is that hitting the recursion limit usually + # results in some ugly messages about how RuntimeErrors are being + # ignored.
+ save_stderr = sys.stderr + sys.stderr = StringIO() + try: + for count, r in self._recursive_matches(nodes, 0): + if self.name: + r[self.name] = nodes[:count] + yield count, r + except RuntimeError: + # We fall back to the iterative pattern matching scheme if the recursive + # scheme hits the recursion limit. + for count, r in self._iterative_matches(nodes): + if self.name: + r[self.name] = nodes[:count] + yield count, r + finally: + sys.stderr = save_stderr + + def _iterative_matches(self, nodes): + """Helper to iteratively yield the matches.""" + nodelen = len(nodes) + if 0 >= self.min: + yield 0, {} + + results = [] + # generate matches that use just one alt from self.content + for alt in self.content: + for c, r in generate_matches(alt, nodes): + yield c, r + results.append((c, r)) + + # for each match, iterate down the nodes + while results: + new_results = [] + for c0, r0 in results: + # stop if the entire set of nodes has been matched + if c0 < nodelen and c0 <= self.max: + for alt in self.content: + for c1, r1 in generate_matches(alt, nodes[c0:]): + if c1 > 0: + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r + new_results.append((c0 + c1, r)) + results = new_results + + def _bare_name_matches(self, nodes): + """Special optimized matcher for bare_name.""" + count = 0 + r = {} + done = False + max = len(nodes) + while not done and count < max: + done = True + for leaf in self.content: + if leaf[0].match(nodes[count], r): + count += 1 + done = False + break + r[self.name] = nodes[:count] + return count, r + + def _recursive_matches(self, nodes, count): + """Helper to recursively yield the matches.""" + assert self.content is not None + if count >= self.min: + yield 0, {} + if count < self.max: + for alt in self.content: + for c0, r0 in generate_matches(alt, nodes): + for c1, r1 in self._recursive_matches(nodes[c0:], count+1): + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r + + +class NegatedPattern(BasePattern): + + def __init__(self, content=None): + """ + Initializer. + + The argument is either a pattern or None. If it is None, this + only matches an empty sequence (effectively '$' in regex + lingo). If it is not None, this matches whenever the argument + pattern doesn't have any matches. + """ + if content is not None: + assert isinstance(content, BasePattern), repr(content) + self.content = content + + def match(self, node): + # We never match a node in its entirety + return False + + def match_seq(self, nodes): + # We only match an empty sequence of nodes in its entirety + return len(nodes) == 0 + + def generate_matches(self, nodes): + if self.content is None: + # Return a match if there is an empty sequence + if len(nodes) == 0: + yield 0, {} + else: + # Return a match if the argument pattern has no matches + for c, r in self.content.generate_matches(nodes): + return + yield 0, {} + + +def generate_matches(patterns, nodes): + """ + Generator yielding matches for a sequence of patterns and nodes. + + Args: + patterns: a sequence of patterns + nodes: a sequence of nodes + + Yields: + (count, results) tuples where: + count: the entire sequence of patterns matches nodes[:count]; + results: dict containing named submatches. 
+ """ + if not patterns: + yield 0, {} + else: + p, rest = patterns[0], patterns[1:] + for c0, r0 in p.generate_matches(nodes): + if not rest: + yield c0, r0 + else: + for c1, r1 in generate_matches(rest, nodes[c0:]): + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r diff --git a/lib3/2to3/lib2to3/refactor.py b/lib3/2to3/lib2to3/refactor.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/refactor.py @@ -0,0 +1,741 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Refactoring framework. + +Used as a main program, this can refactor any number of files and/or +recursively descend down directories. Imported as a module, this +provides infrastructure to write your own refactoring tool. +""" + + + +__author__ = "Guido van Rossum " + + +# Python imports +import os +import sys +import logging +import operator +import collections +import io +from itertools import chain + +# Local imports +from .pgen2 import driver, tokenize, token +from .fixer_util import find_root +from . import pytree, pygram +from . import btm_utils as bu +from . import btm_matcher as bm + + +def get_all_fix_names(fixer_pkg, remove_prefix=True): + """Return a sorted list of all available fix names in the given package.""" + pkg = __import__(fixer_pkg, [], [], ["*"]) + fixer_dir = os.path.dirname(pkg.__file__) + fix_names = [] + for name in sorted(os.listdir(fixer_dir)): + if name.startswith("fix_") and name.endswith(".py"): + if remove_prefix: + name = name[4:] + fix_names.append(name[:-3]) + return fix_names + + +class _EveryNode(Exception): + pass + + +def _get_head_types(pat): + """ Accepts a pytree Pattern Node and returns a set + of the pattern types which will match first. """ + + if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)): + # NodePatters must either have no type and no content + # or a type and content -- so they don't get any farther + # Always return leafs + if pat.type is None: + raise _EveryNode + return set([pat.type]) + + if isinstance(pat, pytree.NegatedPattern): + if pat.content: + return _get_head_types(pat.content) + raise _EveryNode # Negated Patterns don't have a type + + if isinstance(pat, pytree.WildcardPattern): + # Recurse on each node in content + r = set() + for p in pat.content: + for x in p: + r.update(_get_head_types(x)) + return r + + raise Exception("Oh no! I don't understand pattern %s" %(pat)) + + +def _get_headnode_dict(fixer_list): + """ Accepts a list of fixers and returns a dictionary + of head node type --> fixer list. """ + head_nodes = collections.defaultdict(list) + every = [] + for fixer in fixer_list: + if fixer.pattern: + try: + heads = _get_head_types(fixer.pattern) + except _EveryNode: + every.append(fixer) + else: + for node_type in heads: + head_nodes[node_type].append(fixer) + else: + if fixer._accept_type is not None: + head_nodes[fixer._accept_type].append(fixer) + else: + every.append(fixer) + for node_type in chain(iter(pygram.python_grammar.symbol2number.values()), + pygram.python_grammar.tokens): + head_nodes[node_type].extend(every) + return dict(head_nodes) + + +def get_fixers_from_package(pkg_name): + """ + Return the fully qualified names for fixers in the package pkg_name. + """ + return [pkg_name + "." + fix_name + for fix_name in get_all_fix_names(pkg_name, False)] + +def _identity(obj): + return obj + +if sys.version_info < (3, 0): + import codecs + _open_with_encoding = codecs.open + # codecs.open doesn't translate newlines sadly. 
+ def _from_system_newlines(input): + return input.replace("\r\n", "\n") + def _to_system_newlines(input): + if os.linesep != "\n": + return input.replace("\n", os.linesep) + else: + return input +else: + _open_with_encoding = open + _from_system_newlines = _identity + _to_system_newlines = _identity + + +def _detect_future_features(source): + have_docstring = False + gen = tokenize.generate_tokens(io.StringIO(source).readline) + def advance(): + tok = next(gen) + return tok[0], tok[1] + ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT)) + features = set() + try: + while True: + tp, value = advance() + if tp in ignore: + continue + elif tp == token.STRING: + if have_docstring: + break + have_docstring = True + elif tp == token.NAME and value == "from": + tp, value = advance() + if tp != token.NAME or value != "__future__": + break + tp, value = advance() + if tp != token.NAME or value != "import": + break + tp, value = advance() + if tp == token.OP and value == "(": + tp, value = advance() + while tp == token.NAME: + features.add(value) + tp, value = advance() + if tp != token.OP or value != ",": + break + tp, value = advance() + else: + break + except StopIteration: + pass + return frozenset(features) + + +class FixerError(Exception): + """A fixer could not be loaded.""" + + +class RefactoringTool(object): + + _default_options = {"print_function" : False} + + CLASS_PREFIX = "Fix" # The prefix for fixer classes + FILE_PREFIX = "fix_" # The prefix for modules with a fixer within + + def __init__(self, fixer_names, options=None, explicit=None): + """Initializer. + + Args: + fixer_names: a list of fixers to import + options: an dict with configuration. + explicit: a list of fixers to run even if they are explicit. + """ + self.fixers = fixer_names + self.explicit = explicit or [] + self.options = self._default_options.copy() + if options is not None: + self.options.update(options) + if self.options["print_function"]: + self.grammar = pygram.python_grammar_no_print_statement + else: + self.grammar = pygram.python_grammar + self.errors = [] + self.logger = logging.getLogger("RefactoringTool") + self.fixer_log = [] + self.wrote = False + self.driver = driver.Driver(self.grammar, + convert=pytree.convert, + logger=self.logger) + self.pre_order, self.post_order = self.get_fixers() + + + self.files = [] # List of files that were or should be modified + + self.BM = bm.BottomMatcher() + self.bmi_pre_order = [] # Bottom Matcher incompatible fixers + self.bmi_post_order = [] + + for fixer in chain(self.post_order, self.pre_order): + if fixer.BM_compatible: + self.BM.add_fixer(fixer) + # remove fixers that will be handled by the bottom-up + # matcher + elif fixer in self.pre_order: + self.bmi_pre_order.append(fixer) + elif fixer in self.post_order: + self.bmi_post_order.append(fixer) + + self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order) + self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order) + + + + def get_fixers(self): + """Inspects the options to load the requested patterns and handlers. + + Returns: + (pre_order, post_order), where pre_order is the list of fixers that + want a pre-order AST traversal, and post_order is the list that want + post-order traversal. 
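A sketch of what the helper above computes; _detect_future_features() is module-private, so calling it directly is for illustration only:

    from lib2to3 import refactor

    src = (
        '"""docstring"""\n'
        "from __future__ import print_function, division\n"
        "print('hi')\n"
    )
    # The docstring is skipped, then the __future__ names are collected.
    assert refactor._detect_future_features(src) == frozenset(
        {"print_function", "division"})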
+ """ + pre_order_fixers = [] + post_order_fixers = [] + for fix_mod_path in self.fixers: + mod = __import__(fix_mod_path, {}, {}, ["*"]) + fix_name = fix_mod_path.rsplit(".", 1)[-1] + if fix_name.startswith(self.FILE_PREFIX): + fix_name = fix_name[len(self.FILE_PREFIX):] + parts = fix_name.split("_") + class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts]) + try: + fix_class = getattr(mod, class_name) + except AttributeError: + raise FixerError("Can't find %s.%s" % (fix_name, class_name)) + fixer = fix_class(self.options, self.fixer_log) + if fixer.explicit and self.explicit is not True and \ + fix_mod_path not in self.explicit: + self.log_message("Skipping implicit fixer: %s", fix_name) + continue + + self.log_debug("Adding transformation: %s", fix_name) + if fixer.order == "pre": + pre_order_fixers.append(fixer) + elif fixer.order == "post": + post_order_fixers.append(fixer) + else: + raise FixerError("Illegal fixer order: %r" % fixer.order) + + key_func = operator.attrgetter("run_order") + pre_order_fixers.sort(key=key_func) + post_order_fixers.sort(key=key_func) + return (pre_order_fixers, post_order_fixers) + + def log_error(self, msg, *args, **kwds): + """Called when an error occurs.""" + raise + + def log_message(self, msg, *args): + """Hook to log a message.""" + if args: + msg = msg % args + self.logger.info(msg) + + def log_debug(self, msg, *args): + if args: + msg = msg % args + self.logger.debug(msg) + + def print_output(self, old_text, new_text, filename, equal): + """Called with the old version, new version, and filename of a + refactored file.""" + pass + + def refactor(self, items, write=False, doctests_only=False): + """Refactor a list of files and directories.""" + + for dir_or_file in items: + if os.path.isdir(dir_or_file): + self.refactor_dir(dir_or_file, write, doctests_only) + else: + self.refactor_file(dir_or_file, write, doctests_only) + + def refactor_dir(self, dir_name, write=False, doctests_only=False): + """Descends down a directory and refactor every Python file found. + + Python files are assumed to have a .py extension. + + Files and subdirectories starting with '.' are skipped. + """ + py_ext = os.extsep + "py" + for dirpath, dirnames, filenames in os.walk(dir_name): + self.log_debug("Descending into %s", dirpath) + dirnames.sort() + filenames.sort() + for name in filenames: + if (not name.startswith(".") and + os.path.splitext(name)[1] == py_ext): + fullname = os.path.join(dirpath, name) + self.refactor_file(fullname, write, doctests_only) + # Modify dirnames in-place to remove subdirs with leading dots + dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")] + + def _read_python_source(self, filename): + """ + Do our best to decode a Python source file correctly. + """ + try: + f = open(filename, "rb") + except IOError as err: + self.log_error("Can't open %s: %s", filename, err) + return None, None + try: + encoding = tokenize.detect_encoding(f.readline)[0] + finally: + f.close() + with _open_with_encoding(filename, "r", encoding=encoding) as f: + return _from_system_newlines(f.read()), encoding + + def refactor_file(self, filename, write=False, doctests_only=False): + """Refactors a file.""" + input, encoding = self._read_python_source(filename) + if input is None: + # Reading the file failed. 
+ return + input += "\n" # Silence certain parse errors + if doctests_only: + self.log_debug("Refactoring doctests in %s", filename) + output = self.refactor_docstring(input, filename) + if output != input: + self.processed_file(output, filename, input, write, encoding) + else: + self.log_debug("No doctest changes in %s", filename) + else: + tree = self.refactor_string(input, filename) + if tree and tree.was_changed: + # The [:-1] is to take off the \n we added earlier + self.processed_file(str(tree)[:-1], filename, + write=write, encoding=encoding) + else: + self.log_debug("No changes in %s", filename) + + def refactor_string(self, data, name): + """Refactor a given input string. + + Args: + data: a string holding the code to be refactored. + name: a human-readable name for use in error/log messages. + + Returns: + An AST corresponding to the refactored input stream; None if + there were errors during the parse. + """ + features = _detect_future_features(data) + if "print_function" in features: + self.driver.grammar = pygram.python_grammar_no_print_statement + try: + tree = self.driver.parse_string(data) + except Exception as err: + self.log_error("Can't parse %s: %s: %s", + name, err.__class__.__name__, err) + return + finally: + self.driver.grammar = self.grammar + tree.future_features = features + self.log_debug("Refactoring %s", name) + self.refactor_tree(tree, name) + return tree + + def refactor_stdin(self, doctests_only=False): + input = sys.stdin.read() + if doctests_only: + self.log_debug("Refactoring doctests in stdin") + output = self.refactor_docstring(input, "") + if output != input: + self.processed_file(output, "", input) + else: + self.log_debug("No doctest changes in stdin") + else: + tree = self.refactor_string(input, "") + if tree and tree.was_changed: + self.processed_file(str(tree), "", input) + else: + self.log_debug("No changes in stdin") + + def refactor_tree(self, tree, name): + """Refactors a parse tree (modifying the tree in place). + + For compatible patterns the bottom matcher module is + used. Otherwise the tree is traversed node-to-node for + matches. + + Args: + tree: a pytree.Node instance representing the root of the tree + to be refactored. + name: a human-readable name for this tree. + + Returns: + True if the tree was modified, False otherwise. 
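A hedged sketch of driving the RefactoringTool defined here from Python code via refactor_string(); the fixer package name "lib2to3.fixes" is the one normally shipped alongside this module, and its presence is an assumption of this sketch:

    from lib2to3 import refactor

    fixers = refactor.get_fixers_from_package("lib2to3.fixes")
    rt = refactor.RefactoringTool(fixers)

    # Python 2 source goes in, fixed source comes back as str(tree).
    tree = rt.refactor_string("print 'hello'\n", "<example>")
    print(str(tree), end="")   # expected to read: print('hello')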
+ """ + + for fixer in chain(self.pre_order, self.post_order): + fixer.start_tree(tree, name) + + #use traditional matching for the incompatible fixers + self.traverse_by(self.bmi_pre_order_heads, tree.pre_order()) + self.traverse_by(self.bmi_post_order_heads, tree.post_order()) + + # obtain a set of candidate nodes + match_set = self.BM.run(tree.leaves()) + + while any(match_set.values()): + for fixer in self.BM.fixers: + if fixer in match_set and match_set[fixer]: + #sort by depth; apply fixers from bottom(of the AST) to top + match_set[fixer].sort(key=pytree.Base.depth, reverse=True) + + if fixer.keep_line_order: + #some fixers(eg fix_imports) must be applied + #with the original file's line order + match_set[fixer].sort(key=pytree.Base.get_lineno) + + for node in list(match_set[fixer]): + if node in match_set[fixer]: + match_set[fixer].remove(node) + + try: + find_root(node) + except AssertionError: + # this node has been cut off from a + # previous transformation ; skip + continue + + if node.fixers_applied and fixer in node.fixers_applied: + # do not apply the same fixer again + continue + + results = fixer.match(node) + + if results: + new = fixer.transform(node, results) + if new is not None: + node.replace(new) + #new.fixers_applied.append(fixer) + for node in new.post_order(): + # do not apply the fixer again to + # this or any subnode + if not node.fixers_applied: + node.fixers_applied = [] + node.fixers_applied.append(fixer) + + # update the original match set for + # the added code + new_matches = self.BM.run(new.leaves()) + for fxr in new_matches: + if not fxr in match_set: + match_set[fxr]=[] + + match_set[fxr].extend(new_matches[fxr]) + + for fixer in chain(self.pre_order, self.post_order): + fixer.finish_tree(tree, name) + return tree.was_changed + + def traverse_by(self, fixers, traversal): + """Traverse an AST, applying a set of fixers to each node. + + This is a helper method for refactor_tree(). + + Args: + fixers: a list of fixer instances. + traversal: a generator that yields AST nodes. + + Returns: + None + """ + if not fixers: + return + for node in traversal: + for fixer in fixers[node.type]: + results = fixer.match(node) + if results: + new = fixer.transform(node, results) + if new is not None: + node.replace(new) + node = new + + def processed_file(self, new_text, filename, old_text=None, write=False, + encoding=None): + """ + Called when a file has been refactored, and there are changes. + """ + self.files.append(filename) + if old_text is None: + old_text = self._read_python_source(filename)[0] + if old_text is None: + return + equal = old_text == new_text + self.print_output(old_text, new_text, filename, equal) + if equal: + self.log_debug("No changes to %s", filename) + return + if write: + self.write_file(new_text, filename, old_text, encoding) + else: + self.log_debug("Not writing changes to %s", filename) + + def write_file(self, new_text, filename, old_text, encoding=None): + """Writes a string to a file. + + It first shows a unified diff between the old text and the new text, and + then rewrites the file; the latter is only done if the write option is + set. 
+ """ + try: + f = _open_with_encoding(filename, "w", encoding=encoding) + except os.error as err: + self.log_error("Can't create %s: %s", filename, err) + return + try: + f.write(_to_system_newlines(new_text)) + except os.error as err: + self.log_error("Can't write %s: %s", filename, err) + finally: + f.close() + self.log_debug("Wrote changes to %s", filename) + self.wrote = True + + PS1 = ">>> " + PS2 = "... " + + def refactor_docstring(self, input, filename): + """Refactors a docstring, looking for doctests. + + This returns a modified version of the input string. It looks + for doctests, which start with a ">>>" prompt, and may be + continued with "..." prompts, as long as the "..." is indented + the same as the ">>>". + + (Unfortunately we can't use the doctest module's parser, + since, like most parsers, it is not geared towards preserving + the original source.) + """ + result = [] + block = None + block_lineno = None + indent = None + lineno = 0 + for line in input.splitlines(True): + lineno += 1 + if line.lstrip().startswith(self.PS1): + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + block_lineno = lineno + block = [line] + i = line.find(self.PS1) + indent = line[:i] + elif (indent is not None and + (line.startswith(indent + self.PS2) or + line == indent + self.PS2.rstrip() + "\n")): + block.append(line) + else: + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + block = None + indent = None + result.append(line) + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + return "".join(result) + + def refactor_doctest(self, block, lineno, indent, filename): + """Refactors one doctest. + + A doctest is given as a block of lines, the first of which starts + with ">>>" (possibly indented), while the remaining lines start + with "..." (identically indented). + + """ + try: + tree = self.parse_block(block, lineno, indent) + except Exception as err: + if self.logger.isEnabledFor(logging.DEBUG): + for line in block: + self.log_debug("Source: %s", line.rstrip("\n")) + self.log_error("Can't parse docstring in %s line %s: %s: %s", + filename, lineno, err.__class__.__name__, err) + return block + if self.refactor_tree(tree, filename): + new = str(tree).splitlines(True) + # Undo the adjustment of the line numbers in wrap_toks() below. + clipped, new = new[:lineno-1], new[lineno-1:] + assert clipped == ["\n"] * (lineno-1), clipped + if not new[-1].endswith("\n"): + new[-1] += "\n" + block = [indent + self.PS1 + new.pop(0)] + if new: + block += [indent + self.PS2 + line for line in new] + return block + + def summarize(self): + if self.wrote: + were = "were" + else: + were = "need to be" + if not self.files: + self.log_message("No files %s modified.", were) + else: + self.log_message("Files that %s modified:", were) + for file in self.files: + self.log_message(file) + if self.fixer_log: + self.log_message("Warnings/messages while refactoring:") + for message in self.fixer_log: + self.log_message(message) + if self.errors: + if len(self.errors) == 1: + self.log_message("There was 1 error:") + else: + self.log_message("There were %d errors:", len(self.errors)) + for msg, args, kwds in self.errors: + self.log_message(msg, *args, **kwds) + + def parse_block(self, block, lineno, indent): + """Parses a block into a tree. 
+ + This is necessary to get correct line number / offset information + in the parser diagnostics and embedded into the parse tree. + """ + tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent)) + tree.future_features = frozenset() + return tree + + def wrap_toks(self, block, lineno, indent): + """Wraps a tokenize stream to systematically modify start/end.""" + tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__) + for type, value, (line0, col0), (line1, col1), line_text in tokens: + line0 += lineno - 1 + line1 += lineno - 1 + # Don't bother updating the columns; this is too complicated + # since line_text would also have to be updated and it would + # still break for tokens spanning lines. Let the user guess + # that the column numbers for doctests are relative to the + # end of the prompt string (PS1 or PS2). + yield type, value, (line0, col0), (line1, col1), line_text + + + def gen_lines(self, block, indent): + """Generates lines as expected by tokenize from a list of lines. + + This strips the first len(indent + self.PS1) characters off each line. + """ + prefix1 = indent + self.PS1 + prefix2 = indent + self.PS2 + prefix = prefix1 + for line in block: + if line.startswith(prefix): + yield line[len(prefix):] + elif line == prefix.rstrip() + "\n": + yield "\n" + else: + raise AssertionError("line=%r, prefix=%r" % (line, prefix)) + prefix = prefix2 + while True: + yield "" + + +class MultiprocessingUnsupported(Exception): + pass + + +class MultiprocessRefactoringTool(RefactoringTool): + + def __init__(self, *args, **kwargs): + super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs) + self.queue = None + self.output_lock = None + + def refactor(self, items, write=False, doctests_only=False, + num_processes=1): + if num_processes == 1: + return super(MultiprocessRefactoringTool, self).refactor( + items, write, doctests_only) + try: + import multiprocessing + except ImportError: + raise MultiprocessingUnsupported + if self.queue is not None: + raise RuntimeError("already doing multiple processes") + self.queue = multiprocessing.JoinableQueue() + self.output_lock = multiprocessing.Lock() + processes = [multiprocessing.Process(target=self._child) + for i in range(num_processes)] + try: + for p in processes: + p.start() + super(MultiprocessRefactoringTool, self).refactor(items, write, + doctests_only) + finally: + self.queue.join() + for i in range(num_processes): + self.queue.put(None) + for p in processes: + if p.is_alive(): + p.join() + self.queue = None + + def _child(self): + task = self.queue.get() + while task is not None: + args, kwargs = task + try: + super(MultiprocessRefactoringTool, self).refactor_file( + *args, **kwargs) + finally: + self.queue.task_done() + task = self.queue.get() + + def refactor_file(self, *args, **kwargs): + if self.queue is not None: + self.queue.put((args, kwargs)) + else: + return super(MultiprocessRefactoringTool, self).refactor_file( + *args, **kwargs) diff --git a/lib3/2to3/lib2to3/tests/__init__.py b/lib3/2to3/lib2to3/tests/__init__.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/__init__.py @@ -0,0 +1,24 @@ +"""Make tests/ into a package. This allows us to "import tests" and +have tests.all_tests be a TestSuite representing all test cases +from all test_*.py files in tests/.""" +# Author: Collin Winter + +import os +import os.path +import unittest +import types + +from . 
import support + +all_tests = unittest.TestSuite() + +tests_dir = os.path.join(os.path.dirname(__file__), '..', 'tests') +tests = [t[0:-3] for t in os.listdir(tests_dir) + if t.startswith('test_') and t.endswith('.py')] + +loader = unittest.TestLoader() + +for t in tests: + __import__("",globals(),locals(),[t],level=1) + mod = globals()[t] + all_tests.addTests(loader.loadTestsFromModule(mod)) diff --git a/lib3/2to3/lib2to3/tests/data/README b/lib3/2to3/lib2to3/tests/data/README new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/README @@ -0,0 +1,6 @@ +In this directory: +- py2_test_grammar.py -- test file that exercises most/all of Python 2.x's grammar. +- py3_test_grammar.py -- test file that exercises most/all of Python 3.x's grammar. +- infinite_recursion.py -- test file that causes lib2to3's faster recursive pattern matching + scheme to fail, but passes when lib2to3 falls back to iterative pattern matching. +- fixes/ -- for use by test_refactor.py diff --git a/lib3/2to3/lib2to3/tests/data/bom.py b/lib3/2to3/lib2to3/tests/data/bom.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/bom.py @@ -0,0 +1,2 @@ +???# coding: utf-8 +print("BOM BOOM!") diff --git a/lib3/2to3/lib2to3/tests/data/crlf.py b/lib3/2to3/lib2to3/tests/data/crlf.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/crlf.py @@ -0,0 +1,3 @@ +print("hi") + +print("Like bad Windows newlines?") diff --git a/lib3/2to3/lib2to3/tests/data/different_encoding.py b/lib3/2to3/lib2to3/tests/data/different_encoding.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/different_encoding.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +print('????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????????') + +def f(x): + print('%s\t-> ??(%2i):%s ??(%s)') diff --git a/lib3/2to3/lib2to3/tests/data/fixers/bad_order.py b/lib3/2to3/lib2to3/tests/data/fixers/bad_order.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/fixers/bad_order.py @@ -0,0 +1,5 @@ +from lib2to3.fixer_base import BaseFix + +class FixBadOrder(BaseFix): + + order = "crazy" diff --git a/lib3/2to3/lib2to3/tests/data/fixers/myfixes/__init__.py b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/__init__.py new file mode 100644 diff --git a/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_explicit.py b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_explicit.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_explicit.py @@ -0,0 +1,6 @@ +from lib2to3.fixer_base import BaseFix + +class FixExplicit(BaseFix): + explicit = True + + def match(self): return False diff --git a/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_first.py b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_first.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_first.py @@ -0,0 +1,6 @@ +from lib2to3.fixer_base import BaseFix + +class FixFirst(BaseFix): + run_order = 1 + + def match(self, node): return False diff --git a/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_last.py b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_last.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_last.py @@ -0,0 +1,7 @@ +from lib2to3.fixer_base import BaseFix + +class FixLast(BaseFix): + + run_order = 10 + + def match(self, node): return False diff --git a/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_parrot.py 
b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_parrot.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_parrot.py @@ -0,0 +1,13 @@ +from lib2to3.fixer_base import BaseFix +from lib2to3.fixer_util import Name + +class FixParrot(BaseFix): + """ + Change functions named 'parrot' to 'cheese'. + """ + + PATTERN = """funcdef < 'def' name='parrot' any* >""" + + def transform(self, node, results): + name = results["name"] + name.replace(Name("cheese", name.prefix)) diff --git a/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_preorder.py b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_preorder.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/fixers/myfixes/fix_preorder.py @@ -0,0 +1,6 @@ +from lib2to3.fixer_base import BaseFix + +class FixPreorder(BaseFix): + order = "pre" + + def match(self, node): return False diff --git a/lib3/2to3/lib2to3/tests/data/fixers/no_fixer_cls.py b/lib3/2to3/lib2to3/tests/data/fixers/no_fixer_cls.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/fixers/no_fixer_cls.py @@ -0,0 +1,1 @@ +# This is empty so trying to fetch the fixer class gives an AttributeError diff --git a/lib3/2to3/lib2to3/tests/data/fixers/parrot_example.py b/lib3/2to3/lib2to3/tests/data/fixers/parrot_example.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/fixers/parrot_example.py @@ -0,0 +1,2 @@ +def parrot(): + pass diff --git a/lib3/2to3/lib2to3/tests/data/infinite_recursion.py b/lib3/2to3/lib2to3/tests/data/infinite_recursion.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/infinite_recursion.py @@ -0,0 +1,2669 @@ +# This file is used to verify that 2to3 falls back to a slower, iterative pattern matching +# scheme in the event that the faster recursive system fails due to infinite recursion. 
+from ctypes import * +STRING = c_char_p + + +OSUnknownByteOrder = 0 +UIT_PROMPT = 1 +P_PGID = 2 +P_PID = 1 +UIT_ERROR = 5 +UIT_INFO = 4 +UIT_NONE = 0 +P_ALL = 0 +UIT_VERIFY = 2 +OSBigEndian = 2 +UIT_BOOLEAN = 3 +OSLittleEndian = 1 +__darwin_nl_item = c_int +__darwin_wctrans_t = c_int +__darwin_wctype_t = c_ulong +__int8_t = c_byte +__uint8_t = c_ubyte +__int16_t = c_short +__uint16_t = c_ushort +__int32_t = c_int +__uint32_t = c_uint +__int64_t = c_longlong +__uint64_t = c_ulonglong +__darwin_intptr_t = c_long +__darwin_natural_t = c_uint +__darwin_ct_rune_t = c_int +class __mbstate_t(Union): + pass +__mbstate_t._pack_ = 4 +__mbstate_t._fields_ = [ + ('__mbstate8', c_char * 128), + ('_mbstateL', c_longlong), +] +assert sizeof(__mbstate_t) == 128, sizeof(__mbstate_t) +assert alignment(__mbstate_t) == 4, alignment(__mbstate_t) +__darwin_mbstate_t = __mbstate_t +__darwin_ptrdiff_t = c_int +__darwin_size_t = c_ulong +__darwin_va_list = STRING +__darwin_wchar_t = c_int +__darwin_rune_t = __darwin_wchar_t +__darwin_wint_t = c_int +__darwin_clock_t = c_ulong +__darwin_socklen_t = __uint32_t +__darwin_ssize_t = c_long +__darwin_time_t = c_long +sig_atomic_t = c_int +class sigcontext(Structure): + pass +sigcontext._fields_ = [ + ('sc_onstack', c_int), + ('sc_mask', c_int), + ('sc_eax', c_uint), + ('sc_ebx', c_uint), + ('sc_ecx', c_uint), + ('sc_edx', c_uint), + ('sc_edi', c_uint), + ('sc_esi', c_uint), + ('sc_ebp', c_uint), + ('sc_esp', c_uint), + ('sc_ss', c_uint), + ('sc_eflags', c_uint), + ('sc_eip', c_uint), + ('sc_cs', c_uint), + ('sc_ds', c_uint), + ('sc_es', c_uint), + ('sc_fs', c_uint), + ('sc_gs', c_uint), +] +assert sizeof(sigcontext) == 72, sizeof(sigcontext) +assert alignment(sigcontext) == 4, alignment(sigcontext) +u_int8_t = c_ubyte +u_int16_t = c_ushort +u_int32_t = c_uint +u_int64_t = c_ulonglong +int32_t = c_int +register_t = int32_t +user_addr_t = u_int64_t +user_size_t = u_int64_t +int64_t = c_longlong +user_ssize_t = int64_t +user_long_t = int64_t +user_ulong_t = u_int64_t +user_time_t = int64_t +syscall_arg_t = u_int64_t + +# values for unnamed enumeration +class aes_key_st(Structure): + pass +aes_key_st._fields_ = [ + ('rd_key', c_ulong * 60), + ('rounds', c_int), +] +assert sizeof(aes_key_st) == 244, sizeof(aes_key_st) +assert alignment(aes_key_st) == 4, alignment(aes_key_st) +AES_KEY = aes_key_st +class asn1_ctx_st(Structure): + pass +asn1_ctx_st._fields_ = [ + ('p', POINTER(c_ubyte)), + ('eos', c_int), + ('error', c_int), + ('inf', c_int), + ('tag', c_int), + ('xclass', c_int), + ('slen', c_long), + ('max', POINTER(c_ubyte)), + ('q', POINTER(c_ubyte)), + ('pp', POINTER(POINTER(c_ubyte))), + ('line', c_int), +] +assert sizeof(asn1_ctx_st) == 44, sizeof(asn1_ctx_st) +assert alignment(asn1_ctx_st) == 4, alignment(asn1_ctx_st) +ASN1_CTX = asn1_ctx_st +class asn1_object_st(Structure): + pass +asn1_object_st._fields_ = [ + ('sn', STRING), + ('ln', STRING), + ('nid', c_int), + ('length', c_int), + ('data', POINTER(c_ubyte)), + ('flags', c_int), +] +assert sizeof(asn1_object_st) == 24, sizeof(asn1_object_st) +assert alignment(asn1_object_st) == 4, alignment(asn1_object_st) +ASN1_OBJECT = asn1_object_st +class asn1_string_st(Structure): + pass +asn1_string_st._fields_ = [ + ('length', c_int), + ('type', c_int), + ('data', POINTER(c_ubyte)), + ('flags', c_long), +] +assert sizeof(asn1_string_st) == 16, sizeof(asn1_string_st) +assert alignment(asn1_string_st) == 4, alignment(asn1_string_st) +ASN1_STRING = asn1_string_st +class ASN1_ENCODING_st(Structure): + pass 
+ASN1_ENCODING_st._fields_ = [ + ('enc', POINTER(c_ubyte)), + ('len', c_long), + ('modified', c_int), +] +assert sizeof(ASN1_ENCODING_st) == 12, sizeof(ASN1_ENCODING_st) +assert alignment(ASN1_ENCODING_st) == 4, alignment(ASN1_ENCODING_st) +ASN1_ENCODING = ASN1_ENCODING_st +class asn1_string_table_st(Structure): + pass +asn1_string_table_st._fields_ = [ + ('nid', c_int), + ('minsize', c_long), + ('maxsize', c_long), + ('mask', c_ulong), + ('flags', c_ulong), +] +assert sizeof(asn1_string_table_st) == 20, sizeof(asn1_string_table_st) +assert alignment(asn1_string_table_st) == 4, alignment(asn1_string_table_st) +ASN1_STRING_TABLE = asn1_string_table_st +class ASN1_TEMPLATE_st(Structure): + pass +ASN1_TEMPLATE_st._fields_ = [ +] +ASN1_TEMPLATE = ASN1_TEMPLATE_st +class ASN1_ITEM_st(Structure): + pass +ASN1_ITEM = ASN1_ITEM_st +ASN1_ITEM_st._fields_ = [ +] +class ASN1_TLC_st(Structure): + pass +ASN1_TLC = ASN1_TLC_st +ASN1_TLC_st._fields_ = [ +] +class ASN1_VALUE_st(Structure): + pass +ASN1_VALUE_st._fields_ = [ +] +ASN1_VALUE = ASN1_VALUE_st +ASN1_ITEM_EXP = ASN1_ITEM +class asn1_type_st(Structure): + pass +class N12asn1_type_st4DOLLAR_11E(Union): + pass +ASN1_BOOLEAN = c_int +ASN1_INTEGER = asn1_string_st +ASN1_ENUMERATED = asn1_string_st +ASN1_BIT_STRING = asn1_string_st +ASN1_OCTET_STRING = asn1_string_st +ASN1_PRINTABLESTRING = asn1_string_st +ASN1_T61STRING = asn1_string_st +ASN1_IA5STRING = asn1_string_st +ASN1_GENERALSTRING = asn1_string_st +ASN1_BMPSTRING = asn1_string_st +ASN1_UNIVERSALSTRING = asn1_string_st +ASN1_UTCTIME = asn1_string_st +ASN1_GENERALIZEDTIME = asn1_string_st +ASN1_VISIBLESTRING = asn1_string_st +ASN1_UTF8STRING = asn1_string_st +N12asn1_type_st4DOLLAR_11E._fields_ = [ + ('ptr', STRING), + ('boolean', ASN1_BOOLEAN), + ('asn1_string', POINTER(ASN1_STRING)), + ('object', POINTER(ASN1_OBJECT)), + ('integer', POINTER(ASN1_INTEGER)), + ('enumerated', POINTER(ASN1_ENUMERATED)), + ('bit_string', POINTER(ASN1_BIT_STRING)), + ('octet_string', POINTER(ASN1_OCTET_STRING)), + ('printablestring', POINTER(ASN1_PRINTABLESTRING)), + ('t61string', POINTER(ASN1_T61STRING)), + ('ia5string', POINTER(ASN1_IA5STRING)), + ('generalstring', POINTER(ASN1_GENERALSTRING)), + ('bmpstring', POINTER(ASN1_BMPSTRING)), + ('universalstring', POINTER(ASN1_UNIVERSALSTRING)), + ('utctime', POINTER(ASN1_UTCTIME)), + ('generalizedtime', POINTER(ASN1_GENERALIZEDTIME)), + ('visiblestring', POINTER(ASN1_VISIBLESTRING)), + ('utf8string', POINTER(ASN1_UTF8STRING)), + ('set', POINTER(ASN1_STRING)), + ('sequence', POINTER(ASN1_STRING)), +] +assert sizeof(N12asn1_type_st4DOLLAR_11E) == 4, sizeof(N12asn1_type_st4DOLLAR_11E) +assert alignment(N12asn1_type_st4DOLLAR_11E) == 4, alignment(N12asn1_type_st4DOLLAR_11E) +asn1_type_st._fields_ = [ + ('type', c_int), + ('value', N12asn1_type_st4DOLLAR_11E), +] +assert sizeof(asn1_type_st) == 8, sizeof(asn1_type_st) +assert alignment(asn1_type_st) == 4, alignment(asn1_type_st) +ASN1_TYPE = asn1_type_st +class asn1_method_st(Structure): + pass +asn1_method_st._fields_ = [ + ('i2d', CFUNCTYPE(c_int)), + ('d2i', CFUNCTYPE(STRING)), + ('create', CFUNCTYPE(STRING)), + ('destroy', CFUNCTYPE(None)), +] +assert sizeof(asn1_method_st) == 16, sizeof(asn1_method_st) +assert alignment(asn1_method_st) == 4, alignment(asn1_method_st) +ASN1_METHOD = asn1_method_st +class asn1_header_st(Structure): + pass +asn1_header_st._fields_ = [ + ('header', POINTER(ASN1_OCTET_STRING)), + ('data', STRING), + ('meth', POINTER(ASN1_METHOD)), +] +assert sizeof(asn1_header_st) == 12, 
sizeof(asn1_header_st) +assert alignment(asn1_header_st) == 4, alignment(asn1_header_st) +ASN1_HEADER = asn1_header_st +class BIT_STRING_BITNAME_st(Structure): + pass +BIT_STRING_BITNAME_st._fields_ = [ + ('bitnum', c_int), + ('lname', STRING), + ('sname', STRING), +] +assert sizeof(BIT_STRING_BITNAME_st) == 12, sizeof(BIT_STRING_BITNAME_st) +assert alignment(BIT_STRING_BITNAME_st) == 4, alignment(BIT_STRING_BITNAME_st) +BIT_STRING_BITNAME = BIT_STRING_BITNAME_st +class bio_st(Structure): + pass +BIO = bio_st +bio_info_cb = CFUNCTYPE(None, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long) +class bio_method_st(Structure): + pass +bio_method_st._fields_ = [ + ('type', c_int), + ('name', STRING), + ('bwrite', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), + ('bread', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), + ('bputs', CFUNCTYPE(c_int, POINTER(BIO), STRING)), + ('bgets', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)), + ('ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, c_long, c_void_p)), + ('create', CFUNCTYPE(c_int, POINTER(BIO))), + ('destroy', CFUNCTYPE(c_int, POINTER(BIO))), + ('callback_ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, POINTER(bio_info_cb))), +] +assert sizeof(bio_method_st) == 40, sizeof(bio_method_st) +assert alignment(bio_method_st) == 4, alignment(bio_method_st) +BIO_METHOD = bio_method_st +class crypto_ex_data_st(Structure): + pass +class stack_st(Structure): + pass +STACK = stack_st +crypto_ex_data_st._fields_ = [ + ('sk', POINTER(STACK)), + ('dummy', c_int), +] +assert sizeof(crypto_ex_data_st) == 8, sizeof(crypto_ex_data_st) +assert alignment(crypto_ex_data_st) == 4, alignment(crypto_ex_data_st) +CRYPTO_EX_DATA = crypto_ex_data_st +bio_st._fields_ = [ + ('method', POINTER(BIO_METHOD)), + ('callback', CFUNCTYPE(c_long, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)), + ('cb_arg', STRING), + ('init', c_int), + ('shutdown', c_int), + ('flags', c_int), + ('retry_reason', c_int), + ('num', c_int), + ('ptr', c_void_p), + ('next_bio', POINTER(bio_st)), + ('prev_bio', POINTER(bio_st)), + ('references', c_int), + ('num_read', c_ulong), + ('num_write', c_ulong), + ('ex_data', CRYPTO_EX_DATA), +] +assert sizeof(bio_st) == 64, sizeof(bio_st) +assert alignment(bio_st) == 4, alignment(bio_st) +class bio_f_buffer_ctx_struct(Structure): + pass +bio_f_buffer_ctx_struct._fields_ = [ + ('ibuf_size', c_int), + ('obuf_size', c_int), + ('ibuf', STRING), + ('ibuf_len', c_int), + ('ibuf_off', c_int), + ('obuf', STRING), + ('obuf_len', c_int), + ('obuf_off', c_int), +] +assert sizeof(bio_f_buffer_ctx_struct) == 32, sizeof(bio_f_buffer_ctx_struct) +assert alignment(bio_f_buffer_ctx_struct) == 4, alignment(bio_f_buffer_ctx_struct) +BIO_F_BUFFER_CTX = bio_f_buffer_ctx_struct +class hostent(Structure): + pass +hostent._fields_ = [ +] +class bf_key_st(Structure): + pass +bf_key_st._fields_ = [ + ('P', c_uint * 18), + ('S', c_uint * 1024), +] +assert sizeof(bf_key_st) == 4168, sizeof(bf_key_st) +assert alignment(bf_key_st) == 4, alignment(bf_key_st) +BF_KEY = bf_key_st +class bignum_st(Structure): + pass +bignum_st._fields_ = [ + ('d', POINTER(c_ulong)), + ('top', c_int), + ('dmax', c_int), + ('neg', c_int), + ('flags', c_int), +] +assert sizeof(bignum_st) == 20, sizeof(bignum_st) +assert alignment(bignum_st) == 4, alignment(bignum_st) +BIGNUM = bignum_st +class bignum_ctx(Structure): + pass +bignum_ctx._fields_ = [ +] +BN_CTX = bignum_ctx +class bn_blinding_st(Structure): + pass +bn_blinding_st._fields_ = [ + ('init', c_int), + ('A', POINTER(BIGNUM)), + ('Ai', 
POINTER(BIGNUM)), + ('mod', POINTER(BIGNUM)), + ('thread_id', c_ulong), +] +assert sizeof(bn_blinding_st) == 20, sizeof(bn_blinding_st) +assert alignment(bn_blinding_st) == 4, alignment(bn_blinding_st) +BN_BLINDING = bn_blinding_st +class bn_mont_ctx_st(Structure): + pass +bn_mont_ctx_st._fields_ = [ + ('ri', c_int), + ('RR', BIGNUM), + ('N', BIGNUM), + ('Ni', BIGNUM), + ('n0', c_ulong), + ('flags', c_int), +] +assert sizeof(bn_mont_ctx_st) == 72, sizeof(bn_mont_ctx_st) +assert alignment(bn_mont_ctx_st) == 4, alignment(bn_mont_ctx_st) +BN_MONT_CTX = bn_mont_ctx_st +class bn_recp_ctx_st(Structure): + pass +bn_recp_ctx_st._fields_ = [ + ('N', BIGNUM), + ('Nr', BIGNUM), + ('num_bits', c_int), + ('shift', c_int), + ('flags', c_int), +] +assert sizeof(bn_recp_ctx_st) == 52, sizeof(bn_recp_ctx_st) +assert alignment(bn_recp_ctx_st) == 4, alignment(bn_recp_ctx_st) +BN_RECP_CTX = bn_recp_ctx_st +class buf_mem_st(Structure): + pass +buf_mem_st._fields_ = [ + ('length', c_int), + ('data', STRING), + ('max', c_int), +] +assert sizeof(buf_mem_st) == 12, sizeof(buf_mem_st) +assert alignment(buf_mem_st) == 4, alignment(buf_mem_st) +BUF_MEM = buf_mem_st +class cast_key_st(Structure): + pass +cast_key_st._fields_ = [ + ('data', c_ulong * 32), + ('short_key', c_int), +] +assert sizeof(cast_key_st) == 132, sizeof(cast_key_st) +assert alignment(cast_key_st) == 4, alignment(cast_key_st) +CAST_KEY = cast_key_st +class comp_method_st(Structure): + pass +comp_method_st._fields_ = [ + ('type', c_int), + ('name', STRING), + ('init', CFUNCTYPE(c_int)), + ('finish', CFUNCTYPE(None)), + ('compress', CFUNCTYPE(c_int)), + ('expand', CFUNCTYPE(c_int)), + ('ctrl', CFUNCTYPE(c_long)), + ('callback_ctrl', CFUNCTYPE(c_long)), +] +assert sizeof(comp_method_st) == 32, sizeof(comp_method_st) +assert alignment(comp_method_st) == 4, alignment(comp_method_st) +COMP_METHOD = comp_method_st +class comp_ctx_st(Structure): + pass +comp_ctx_st._fields_ = [ + ('meth', POINTER(COMP_METHOD)), + ('compress_in', c_ulong), + ('compress_out', c_ulong), + ('expand_in', c_ulong), + ('expand_out', c_ulong), + ('ex_data', CRYPTO_EX_DATA), +] +assert sizeof(comp_ctx_st) == 28, sizeof(comp_ctx_st) +assert alignment(comp_ctx_st) == 4, alignment(comp_ctx_st) +COMP_CTX = comp_ctx_st +class CRYPTO_dynlock_value(Structure): + pass +CRYPTO_dynlock_value._fields_ = [ +] +class CRYPTO_dynlock(Structure): + pass +CRYPTO_dynlock._fields_ = [ + ('references', c_int), + ('data', POINTER(CRYPTO_dynlock_value)), +] +assert sizeof(CRYPTO_dynlock) == 8, sizeof(CRYPTO_dynlock) +assert alignment(CRYPTO_dynlock) == 4, alignment(CRYPTO_dynlock) +BIO_dummy = bio_st +CRYPTO_EX_new = CFUNCTYPE(c_int, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p) +CRYPTO_EX_free = CFUNCTYPE(None, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p) +CRYPTO_EX_dup = CFUNCTYPE(c_int, POINTER(CRYPTO_EX_DATA), POINTER(CRYPTO_EX_DATA), c_void_p, c_int, c_long, c_void_p) +class crypto_ex_data_func_st(Structure): + pass +crypto_ex_data_func_st._fields_ = [ + ('argl', c_long), + ('argp', c_void_p), + ('new_func', POINTER(CRYPTO_EX_new)), + ('free_func', POINTER(CRYPTO_EX_free)), + ('dup_func', POINTER(CRYPTO_EX_dup)), +] +assert sizeof(crypto_ex_data_func_st) == 20, sizeof(crypto_ex_data_func_st) +assert alignment(crypto_ex_data_func_st) == 4, alignment(crypto_ex_data_func_st) +CRYPTO_EX_DATA_FUNCS = crypto_ex_data_func_st +class st_CRYPTO_EX_DATA_IMPL(Structure): + pass +CRYPTO_EX_DATA_IMPL = st_CRYPTO_EX_DATA_IMPL +st_CRYPTO_EX_DATA_IMPL._fields_ = [ +] 
+CRYPTO_MEM_LEAK_CB = CFUNCTYPE(c_void_p, c_ulong, STRING, c_int, c_int, c_void_p) +DES_cblock = c_ubyte * 8 +const_DES_cblock = c_ubyte * 8 +class DES_ks(Structure): + pass +class N6DES_ks3DOLLAR_9E(Union): + pass +N6DES_ks3DOLLAR_9E._fields_ = [ + ('cblock', DES_cblock), + ('deslong', c_ulong * 2), +] +assert sizeof(N6DES_ks3DOLLAR_9E) == 8, sizeof(N6DES_ks3DOLLAR_9E) +assert alignment(N6DES_ks3DOLLAR_9E) == 4, alignment(N6DES_ks3DOLLAR_9E) +DES_ks._fields_ = [ + ('ks', N6DES_ks3DOLLAR_9E * 16), +] +assert sizeof(DES_ks) == 128, sizeof(DES_ks) +assert alignment(DES_ks) == 4, alignment(DES_ks) +DES_key_schedule = DES_ks +_ossl_old_des_cblock = c_ubyte * 8 +class _ossl_old_des_ks_struct(Structure): + pass +class N23_ossl_old_des_ks_struct4DOLLAR_10E(Union): + pass +N23_ossl_old_des_ks_struct4DOLLAR_10E._fields_ = [ + ('_', _ossl_old_des_cblock), + ('pad', c_ulong * 2), +] +assert sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 8, sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) +assert alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 4, alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) +_ossl_old_des_ks_struct._fields_ = [ + ('ks', N23_ossl_old_des_ks_struct4DOLLAR_10E), +] +assert sizeof(_ossl_old_des_ks_struct) == 8, sizeof(_ossl_old_des_ks_struct) +assert alignment(_ossl_old_des_ks_struct) == 4, alignment(_ossl_old_des_ks_struct) +_ossl_old_des_key_schedule = _ossl_old_des_ks_struct * 16 +class dh_st(Structure): + pass +DH = dh_st +class dh_method(Structure): + pass +dh_method._fields_ = [ + ('name', STRING), + ('generate_key', CFUNCTYPE(c_int, POINTER(DH))), + ('compute_key', CFUNCTYPE(c_int, POINTER(c_ubyte), POINTER(BIGNUM), POINTER(DH))), + ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DH), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), + ('init', CFUNCTYPE(c_int, POINTER(DH))), + ('finish', CFUNCTYPE(c_int, POINTER(DH))), + ('flags', c_int), + ('app_data', STRING), +] +assert sizeof(dh_method) == 32, sizeof(dh_method) +assert alignment(dh_method) == 4, alignment(dh_method) +DH_METHOD = dh_method +class engine_st(Structure): + pass +ENGINE = engine_st +dh_st._fields_ = [ + ('pad', c_int), + ('version', c_int), + ('p', POINTER(BIGNUM)), + ('g', POINTER(BIGNUM)), + ('length', c_long), + ('pub_key', POINTER(BIGNUM)), + ('priv_key', POINTER(BIGNUM)), + ('flags', c_int), + ('method_mont_p', STRING), + ('q', POINTER(BIGNUM)), + ('j', POINTER(BIGNUM)), + ('seed', POINTER(c_ubyte)), + ('seedlen', c_int), + ('counter', POINTER(BIGNUM)), + ('references', c_int), + ('ex_data', CRYPTO_EX_DATA), + ('meth', POINTER(DH_METHOD)), + ('engine', POINTER(ENGINE)), +] +assert sizeof(dh_st) == 76, sizeof(dh_st) +assert alignment(dh_st) == 4, alignment(dh_st) +class dsa_st(Structure): + pass +DSA = dsa_st +class DSA_SIG_st(Structure): + pass +DSA_SIG_st._fields_ = [ + ('r', POINTER(BIGNUM)), + ('s', POINTER(BIGNUM)), +] +assert sizeof(DSA_SIG_st) == 8, sizeof(DSA_SIG_st) +assert alignment(DSA_SIG_st) == 4, alignment(DSA_SIG_st) +DSA_SIG = DSA_SIG_st +class dsa_method(Structure): + pass +dsa_method._fields_ = [ + ('name', STRING), + ('dsa_do_sign', CFUNCTYPE(POINTER(DSA_SIG), POINTER(c_ubyte), c_int, POINTER(DSA))), + ('dsa_sign_setup', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BN_CTX), POINTER(POINTER(BIGNUM)), POINTER(POINTER(BIGNUM)))), + ('dsa_do_verify', CFUNCTYPE(c_int, POINTER(c_ubyte), c_int, POINTER(DSA_SIG), POINTER(DSA))), + ('dsa_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), 
POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), + ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), + ('init', CFUNCTYPE(c_int, POINTER(DSA))), + ('finish', CFUNCTYPE(c_int, POINTER(DSA))), + ('flags', c_int), + ('app_data', STRING), +] +assert sizeof(dsa_method) == 40, sizeof(dsa_method) +assert alignment(dsa_method) == 4, alignment(dsa_method) +DSA_METHOD = dsa_method +dsa_st._fields_ = [ + ('pad', c_int), + ('version', c_long), + ('write_params', c_int), + ('p', POINTER(BIGNUM)), + ('q', POINTER(BIGNUM)), + ('g', POINTER(BIGNUM)), + ('pub_key', POINTER(BIGNUM)), + ('priv_key', POINTER(BIGNUM)), + ('kinv', POINTER(BIGNUM)), + ('r', POINTER(BIGNUM)), + ('flags', c_int), + ('method_mont_p', STRING), + ('references', c_int), + ('ex_data', CRYPTO_EX_DATA), + ('meth', POINTER(DSA_METHOD)), + ('engine', POINTER(ENGINE)), +] +assert sizeof(dsa_st) == 68, sizeof(dsa_st) +assert alignment(dsa_st) == 4, alignment(dsa_st) +class evp_pkey_st(Structure): + pass +class N11evp_pkey_st4DOLLAR_12E(Union): + pass +class rsa_st(Structure): + pass +N11evp_pkey_st4DOLLAR_12E._fields_ = [ + ('ptr', STRING), + ('rsa', POINTER(rsa_st)), + ('dsa', POINTER(dsa_st)), + ('dh', POINTER(dh_st)), +] +assert sizeof(N11evp_pkey_st4DOLLAR_12E) == 4, sizeof(N11evp_pkey_st4DOLLAR_12E) +assert alignment(N11evp_pkey_st4DOLLAR_12E) == 4, alignment(N11evp_pkey_st4DOLLAR_12E) +evp_pkey_st._fields_ = [ + ('type', c_int), + ('save_type', c_int), + ('references', c_int), + ('pkey', N11evp_pkey_st4DOLLAR_12E), + ('save_parameters', c_int), + ('attributes', POINTER(STACK)), +] +assert sizeof(evp_pkey_st) == 24, sizeof(evp_pkey_st) +assert alignment(evp_pkey_st) == 4, alignment(evp_pkey_st) +class env_md_st(Structure): + pass +class env_md_ctx_st(Structure): + pass +EVP_MD_CTX = env_md_ctx_st +env_md_st._fields_ = [ + ('type', c_int), + ('pkey_type', c_int), + ('md_size', c_int), + ('flags', c_ulong), + ('init', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))), + ('update', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), c_void_p, c_ulong)), + ('final', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(c_ubyte))), + ('copy', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(EVP_MD_CTX))), + ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))), + ('sign', CFUNCTYPE(c_int)), + ('verify', CFUNCTYPE(c_int)), + ('required_pkey_type', c_int * 5), + ('block_size', c_int), + ('ctx_size', c_int), +] +assert sizeof(env_md_st) == 72, sizeof(env_md_st) +assert alignment(env_md_st) == 4, alignment(env_md_st) +EVP_MD = env_md_st +env_md_ctx_st._fields_ = [ + ('digest', POINTER(EVP_MD)), + ('engine', POINTER(ENGINE)), + ('flags', c_ulong), + ('md_data', c_void_p), +] +assert sizeof(env_md_ctx_st) == 16, sizeof(env_md_ctx_st) +assert alignment(env_md_ctx_st) == 4, alignment(env_md_ctx_st) +class evp_cipher_st(Structure): + pass +class evp_cipher_ctx_st(Structure): + pass +EVP_CIPHER_CTX = evp_cipher_ctx_st +evp_cipher_st._fields_ = [ + ('nid', c_int), + ('block_size', c_int), + ('key_len', c_int), + ('iv_len', c_int), + ('flags', c_ulong), + ('init', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_int)), + ('do_cipher', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_uint)), + ('cleanup', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX))), + ('ctx_size', c_int), + ('set_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))), + ('get_asn1_parameters', CFUNCTYPE(c_int, 
POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))), + ('ctrl', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), c_int, c_int, c_void_p)), + ('app_data', c_void_p), +] +assert sizeof(evp_cipher_st) == 52, sizeof(evp_cipher_st) +assert alignment(evp_cipher_st) == 4, alignment(evp_cipher_st) +class evp_cipher_info_st(Structure): + pass +EVP_CIPHER = evp_cipher_st +evp_cipher_info_st._fields_ = [ + ('cipher', POINTER(EVP_CIPHER)), + ('iv', c_ubyte * 16), +] +assert sizeof(evp_cipher_info_st) == 20, sizeof(evp_cipher_info_st) +assert alignment(evp_cipher_info_st) == 4, alignment(evp_cipher_info_st) +EVP_CIPHER_INFO = evp_cipher_info_st +evp_cipher_ctx_st._fields_ = [ + ('cipher', POINTER(EVP_CIPHER)), + ('engine', POINTER(ENGINE)), + ('encrypt', c_int), + ('buf_len', c_int), + ('oiv', c_ubyte * 16), + ('iv', c_ubyte * 16), + ('buf', c_ubyte * 32), + ('num', c_int), + ('app_data', c_void_p), + ('key_len', c_int), + ('flags', c_ulong), + ('cipher_data', c_void_p), + ('final_used', c_int), + ('block_mask', c_int), + ('final', c_ubyte * 32), +] +assert sizeof(evp_cipher_ctx_st) == 140, sizeof(evp_cipher_ctx_st) +assert alignment(evp_cipher_ctx_st) == 4, alignment(evp_cipher_ctx_st) +class evp_Encode_Ctx_st(Structure): + pass +evp_Encode_Ctx_st._fields_ = [ + ('num', c_int), + ('length', c_int), + ('enc_data', c_ubyte * 80), + ('line_num', c_int), + ('expect_nl', c_int), +] +assert sizeof(evp_Encode_Ctx_st) == 96, sizeof(evp_Encode_Ctx_st) +assert alignment(evp_Encode_Ctx_st) == 4, alignment(evp_Encode_Ctx_st) +EVP_ENCODE_CTX = evp_Encode_Ctx_st +EVP_PBE_KEYGEN = CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), STRING, c_int, POINTER(ASN1_TYPE), POINTER(EVP_CIPHER), POINTER(EVP_MD), c_int) +class lhash_node_st(Structure): + pass +lhash_node_st._fields_ = [ + ('data', c_void_p), + ('next', POINTER(lhash_node_st)), + ('hash', c_ulong), +] +assert sizeof(lhash_node_st) == 12, sizeof(lhash_node_st) +assert alignment(lhash_node_st) == 4, alignment(lhash_node_st) +LHASH_NODE = lhash_node_st +LHASH_COMP_FN_TYPE = CFUNCTYPE(c_int, c_void_p, c_void_p) +LHASH_HASH_FN_TYPE = CFUNCTYPE(c_ulong, c_void_p) +LHASH_DOALL_FN_TYPE = CFUNCTYPE(None, c_void_p) +LHASH_DOALL_ARG_FN_TYPE = CFUNCTYPE(None, c_void_p, c_void_p) +class lhash_st(Structure): + pass +lhash_st._fields_ = [ + ('b', POINTER(POINTER(LHASH_NODE))), + ('comp', LHASH_COMP_FN_TYPE), + ('hash', LHASH_HASH_FN_TYPE), + ('num_nodes', c_uint), + ('num_alloc_nodes', c_uint), + ('p', c_uint), + ('pmax', c_uint), + ('up_load', c_ulong), + ('down_load', c_ulong), + ('num_items', c_ulong), + ('num_expands', c_ulong), + ('num_expand_reallocs', c_ulong), + ('num_contracts', c_ulong), + ('num_contract_reallocs', c_ulong), + ('num_hash_calls', c_ulong), + ('num_comp_calls', c_ulong), + ('num_insert', c_ulong), + ('num_replace', c_ulong), + ('num_delete', c_ulong), + ('num_no_delete', c_ulong), + ('num_retrieve', c_ulong), + ('num_retrieve_miss', c_ulong), + ('num_hash_comps', c_ulong), + ('error', c_int), +] +assert sizeof(lhash_st) == 96, sizeof(lhash_st) +assert alignment(lhash_st) == 4, alignment(lhash_st) +LHASH = lhash_st +class MD2state_st(Structure): + pass +MD2state_st._fields_ = [ + ('num', c_int), + ('data', c_ubyte * 16), + ('cksm', c_uint * 16), + ('state', c_uint * 16), +] +assert sizeof(MD2state_st) == 148, sizeof(MD2state_st) +assert alignment(MD2state_st) == 4, alignment(MD2state_st) +MD2_CTX = MD2state_st +class MD4state_st(Structure): + pass +MD4state_st._fields_ = [ + ('A', c_uint), + ('B', c_uint), + ('C', c_uint), + ('D', c_uint), + ('Nl', c_uint), + ('Nh', 
c_uint), + ('data', c_uint * 16), + ('num', c_int), +] +assert sizeof(MD4state_st) == 92, sizeof(MD4state_st) +assert alignment(MD4state_st) == 4, alignment(MD4state_st) +MD4_CTX = MD4state_st +class MD5state_st(Structure): + pass +MD5state_st._fields_ = [ + ('A', c_uint), + ('B', c_uint), + ('C', c_uint), + ('D', c_uint), + ('Nl', c_uint), + ('Nh', c_uint), + ('data', c_uint * 16), + ('num', c_int), +] +assert sizeof(MD5state_st) == 92, sizeof(MD5state_st) +assert alignment(MD5state_st) == 4, alignment(MD5state_st) +MD5_CTX = MD5state_st +class mdc2_ctx_st(Structure): + pass +mdc2_ctx_st._fields_ = [ + ('num', c_int), + ('data', c_ubyte * 8), + ('h', DES_cblock), + ('hh', DES_cblock), + ('pad_type', c_int), +] +assert sizeof(mdc2_ctx_st) == 32, sizeof(mdc2_ctx_st) +assert alignment(mdc2_ctx_st) == 4, alignment(mdc2_ctx_st) +MDC2_CTX = mdc2_ctx_st +class obj_name_st(Structure): + pass +obj_name_st._fields_ = [ + ('type', c_int), + ('alias', c_int), + ('name', STRING), + ('data', STRING), +] +assert sizeof(obj_name_st) == 16, sizeof(obj_name_st) +assert alignment(obj_name_st) == 4, alignment(obj_name_st) +OBJ_NAME = obj_name_st +ASN1_TIME = asn1_string_st +ASN1_NULL = c_int +EVP_PKEY = evp_pkey_st +class x509_st(Structure): + pass +X509 = x509_st +class X509_algor_st(Structure): + pass +X509_ALGOR = X509_algor_st +class X509_crl_st(Structure): + pass +X509_CRL = X509_crl_st +class X509_name_st(Structure): + pass +X509_NAME = X509_name_st +class x509_store_st(Structure): + pass +X509_STORE = x509_store_st +class x509_store_ctx_st(Structure): + pass +X509_STORE_CTX = x509_store_ctx_st +engine_st._fields_ = [ +] +class PEM_Encode_Seal_st(Structure): + pass +PEM_Encode_Seal_st._fields_ = [ + ('encode', EVP_ENCODE_CTX), + ('md', EVP_MD_CTX), + ('cipher', EVP_CIPHER_CTX), +] +assert sizeof(PEM_Encode_Seal_st) == 252, sizeof(PEM_Encode_Seal_st) +assert alignment(PEM_Encode_Seal_st) == 4, alignment(PEM_Encode_Seal_st) +PEM_ENCODE_SEAL_CTX = PEM_Encode_Seal_st +class pem_recip_st(Structure): + pass +pem_recip_st._fields_ = [ + ('name', STRING), + ('dn', POINTER(X509_NAME)), + ('cipher', c_int), + ('key_enc', c_int), +] +assert sizeof(pem_recip_st) == 16, sizeof(pem_recip_st) +assert alignment(pem_recip_st) == 4, alignment(pem_recip_st) +PEM_USER = pem_recip_st +class pem_ctx_st(Structure): + pass +class N10pem_ctx_st4DOLLAR_16E(Structure): + pass +N10pem_ctx_st4DOLLAR_16E._fields_ = [ + ('version', c_int), + ('mode', c_int), +] +assert sizeof(N10pem_ctx_st4DOLLAR_16E) == 8, sizeof(N10pem_ctx_st4DOLLAR_16E) +assert alignment(N10pem_ctx_st4DOLLAR_16E) == 4, alignment(N10pem_ctx_st4DOLLAR_16E) +class N10pem_ctx_st4DOLLAR_17E(Structure): + pass +N10pem_ctx_st4DOLLAR_17E._fields_ = [ + ('cipher', c_int), +] +assert sizeof(N10pem_ctx_st4DOLLAR_17E) == 4, sizeof(N10pem_ctx_st4DOLLAR_17E) +assert alignment(N10pem_ctx_st4DOLLAR_17E) == 4, alignment(N10pem_ctx_st4DOLLAR_17E) +pem_ctx_st._fields_ = [ + ('type', c_int), + ('proc_type', N10pem_ctx_st4DOLLAR_16E), + ('domain', STRING), + ('DEK_info', N10pem_ctx_st4DOLLAR_17E), + ('originator', POINTER(PEM_USER)), + ('num_recipient', c_int), + ('recipient', POINTER(POINTER(PEM_USER))), + ('x509_chain', POINTER(STACK)), + ('md', POINTER(EVP_MD)), + ('md_enc', c_int), + ('md_len', c_int), + ('md_data', STRING), + ('dec', POINTER(EVP_CIPHER)), + ('key_len', c_int), + ('key', POINTER(c_ubyte)), + ('data_enc', c_int), + ('data_len', c_int), + ('data', POINTER(c_ubyte)), +] +assert sizeof(pem_ctx_st) == 76, sizeof(pem_ctx_st) +assert alignment(pem_ctx_st) == 4, 
alignment(pem_ctx_st) +PEM_CTX = pem_ctx_st +pem_password_cb = CFUNCTYPE(c_int, STRING, c_int, c_int, c_void_p) +class pkcs7_issuer_and_serial_st(Structure): + pass +pkcs7_issuer_and_serial_st._fields_ = [ + ('issuer', POINTER(X509_NAME)), + ('serial', POINTER(ASN1_INTEGER)), +] +assert sizeof(pkcs7_issuer_and_serial_st) == 8, sizeof(pkcs7_issuer_and_serial_st) +assert alignment(pkcs7_issuer_and_serial_st) == 4, alignment(pkcs7_issuer_and_serial_st) +PKCS7_ISSUER_AND_SERIAL = pkcs7_issuer_and_serial_st +class pkcs7_signer_info_st(Structure): + pass +pkcs7_signer_info_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)), + ('digest_alg', POINTER(X509_ALGOR)), + ('auth_attr', POINTER(STACK)), + ('digest_enc_alg', POINTER(X509_ALGOR)), + ('enc_digest', POINTER(ASN1_OCTET_STRING)), + ('unauth_attr', POINTER(STACK)), + ('pkey', POINTER(EVP_PKEY)), +] +assert sizeof(pkcs7_signer_info_st) == 32, sizeof(pkcs7_signer_info_st) +assert alignment(pkcs7_signer_info_st) == 4, alignment(pkcs7_signer_info_st) +PKCS7_SIGNER_INFO = pkcs7_signer_info_st +class pkcs7_recip_info_st(Structure): + pass +pkcs7_recip_info_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)), + ('key_enc_algor', POINTER(X509_ALGOR)), + ('enc_key', POINTER(ASN1_OCTET_STRING)), + ('cert', POINTER(X509)), +] +assert sizeof(pkcs7_recip_info_st) == 20, sizeof(pkcs7_recip_info_st) +assert alignment(pkcs7_recip_info_st) == 4, alignment(pkcs7_recip_info_st) +PKCS7_RECIP_INFO = pkcs7_recip_info_st +class pkcs7_signed_st(Structure): + pass +class pkcs7_st(Structure): + pass +pkcs7_signed_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('md_algs', POINTER(STACK)), + ('cert', POINTER(STACK)), + ('crl', POINTER(STACK)), + ('signer_info', POINTER(STACK)), + ('contents', POINTER(pkcs7_st)), +] +assert sizeof(pkcs7_signed_st) == 24, sizeof(pkcs7_signed_st) +assert alignment(pkcs7_signed_st) == 4, alignment(pkcs7_signed_st) +PKCS7_SIGNED = pkcs7_signed_st +class pkcs7_enc_content_st(Structure): + pass +pkcs7_enc_content_st._fields_ = [ + ('content_type', POINTER(ASN1_OBJECT)), + ('algorithm', POINTER(X509_ALGOR)), + ('enc_data', POINTER(ASN1_OCTET_STRING)), + ('cipher', POINTER(EVP_CIPHER)), +] +assert sizeof(pkcs7_enc_content_st) == 16, sizeof(pkcs7_enc_content_st) +assert alignment(pkcs7_enc_content_st) == 4, alignment(pkcs7_enc_content_st) +PKCS7_ENC_CONTENT = pkcs7_enc_content_st +class pkcs7_enveloped_st(Structure): + pass +pkcs7_enveloped_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('recipientinfo', POINTER(STACK)), + ('enc_data', POINTER(PKCS7_ENC_CONTENT)), +] +assert sizeof(pkcs7_enveloped_st) == 12, sizeof(pkcs7_enveloped_st) +assert alignment(pkcs7_enveloped_st) == 4, alignment(pkcs7_enveloped_st) +PKCS7_ENVELOPE = pkcs7_enveloped_st +class pkcs7_signedandenveloped_st(Structure): + pass +pkcs7_signedandenveloped_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('md_algs', POINTER(STACK)), + ('cert', POINTER(STACK)), + ('crl', POINTER(STACK)), + ('signer_info', POINTER(STACK)), + ('enc_data', POINTER(PKCS7_ENC_CONTENT)), + ('recipientinfo', POINTER(STACK)), +] +assert sizeof(pkcs7_signedandenveloped_st) == 28, sizeof(pkcs7_signedandenveloped_st) +assert alignment(pkcs7_signedandenveloped_st) == 4, alignment(pkcs7_signedandenveloped_st) +PKCS7_SIGN_ENVELOPE = pkcs7_signedandenveloped_st +class pkcs7_digest_st(Structure): + pass +pkcs7_digest_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + 
('md', POINTER(X509_ALGOR)), + ('contents', POINTER(pkcs7_st)), + ('digest', POINTER(ASN1_OCTET_STRING)), +] +assert sizeof(pkcs7_digest_st) == 16, sizeof(pkcs7_digest_st) +assert alignment(pkcs7_digest_st) == 4, alignment(pkcs7_digest_st) +PKCS7_DIGEST = pkcs7_digest_st +class pkcs7_encrypted_st(Structure): + pass +pkcs7_encrypted_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('enc_data', POINTER(PKCS7_ENC_CONTENT)), +] +assert sizeof(pkcs7_encrypted_st) == 8, sizeof(pkcs7_encrypted_st) +assert alignment(pkcs7_encrypted_st) == 4, alignment(pkcs7_encrypted_st) +PKCS7_ENCRYPT = pkcs7_encrypted_st +class N8pkcs7_st4DOLLAR_15E(Union): + pass +N8pkcs7_st4DOLLAR_15E._fields_ = [ + ('ptr', STRING), + ('data', POINTER(ASN1_OCTET_STRING)), + ('sign', POINTER(PKCS7_SIGNED)), + ('enveloped', POINTER(PKCS7_ENVELOPE)), + ('signed_and_enveloped', POINTER(PKCS7_SIGN_ENVELOPE)), + ('digest', POINTER(PKCS7_DIGEST)), + ('encrypted', POINTER(PKCS7_ENCRYPT)), + ('other', POINTER(ASN1_TYPE)), +] +assert sizeof(N8pkcs7_st4DOLLAR_15E) == 4, sizeof(N8pkcs7_st4DOLLAR_15E) +assert alignment(N8pkcs7_st4DOLLAR_15E) == 4, alignment(N8pkcs7_st4DOLLAR_15E) +pkcs7_st._fields_ = [ + ('asn1', POINTER(c_ubyte)), + ('length', c_long), + ('state', c_int), + ('detached', c_int), + ('type', POINTER(ASN1_OBJECT)), + ('d', N8pkcs7_st4DOLLAR_15E), +] +assert sizeof(pkcs7_st) == 24, sizeof(pkcs7_st) +assert alignment(pkcs7_st) == 4, alignment(pkcs7_st) +PKCS7 = pkcs7_st +class rc2_key_st(Structure): + pass +rc2_key_st._fields_ = [ + ('data', c_uint * 64), +] +assert sizeof(rc2_key_st) == 256, sizeof(rc2_key_st) +assert alignment(rc2_key_st) == 4, alignment(rc2_key_st) +RC2_KEY = rc2_key_st +class rc4_key_st(Structure): + pass +rc4_key_st._fields_ = [ + ('x', c_ubyte), + ('y', c_ubyte), + ('data', c_ubyte * 256), +] +assert sizeof(rc4_key_st) == 258, sizeof(rc4_key_st) +assert alignment(rc4_key_st) == 1, alignment(rc4_key_st) +RC4_KEY = rc4_key_st +class rc5_key_st(Structure): + pass +rc5_key_st._fields_ = [ + ('rounds', c_int), + ('data', c_ulong * 34), +] +assert sizeof(rc5_key_st) == 140, sizeof(rc5_key_st) +assert alignment(rc5_key_st) == 4, alignment(rc5_key_st) +RC5_32_KEY = rc5_key_st +class RIPEMD160state_st(Structure): + pass +RIPEMD160state_st._fields_ = [ + ('A', c_uint), + ('B', c_uint), + ('C', c_uint), + ('D', c_uint), + ('E', c_uint), + ('Nl', c_uint), + ('Nh', c_uint), + ('data', c_uint * 16), + ('num', c_int), +] +assert sizeof(RIPEMD160state_st) == 96, sizeof(RIPEMD160state_st) +assert alignment(RIPEMD160state_st) == 4, alignment(RIPEMD160state_st) +RIPEMD160_CTX = RIPEMD160state_st +RSA = rsa_st +class rsa_meth_st(Structure): + pass +rsa_meth_st._fields_ = [ + ('name', STRING), + ('rsa_pub_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)), + ('rsa_pub_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)), + ('rsa_priv_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)), + ('rsa_priv_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)), + ('rsa_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(RSA))), + ('bn_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))), + ('init', CFUNCTYPE(c_int, POINTER(RSA))), + ('finish', CFUNCTYPE(c_int, POINTER(RSA))), + ('flags', c_int), + ('app_data', STRING), + ('rsa_sign', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, 
POINTER(c_ubyte), POINTER(c_uint), POINTER(RSA))), + ('rsa_verify', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), c_uint, POINTER(RSA))), +] +assert sizeof(rsa_meth_st) == 52, sizeof(rsa_meth_st) +assert alignment(rsa_meth_st) == 4, alignment(rsa_meth_st) +RSA_METHOD = rsa_meth_st +rsa_st._fields_ = [ + ('pad', c_int), + ('version', c_long), + ('meth', POINTER(RSA_METHOD)), + ('engine', POINTER(ENGINE)), + ('n', POINTER(BIGNUM)), + ('e', POINTER(BIGNUM)), + ('d', POINTER(BIGNUM)), + ('p', POINTER(BIGNUM)), + ('q', POINTER(BIGNUM)), + ('dmp1', POINTER(BIGNUM)), + ('dmq1', POINTER(BIGNUM)), + ('iqmp', POINTER(BIGNUM)), + ('ex_data', CRYPTO_EX_DATA), + ('references', c_int), + ('flags', c_int), + ('_method_mod_n', POINTER(BN_MONT_CTX)), + ('_method_mod_p', POINTER(BN_MONT_CTX)), + ('_method_mod_q', POINTER(BN_MONT_CTX)), + ('bignum_data', STRING), + ('blinding', POINTER(BN_BLINDING)), +] +assert sizeof(rsa_st) == 84, sizeof(rsa_st) +assert alignment(rsa_st) == 4, alignment(rsa_st) +openssl_fptr = CFUNCTYPE(None) +class SHAstate_st(Structure): + pass +SHAstate_st._fields_ = [ + ('h0', c_uint), + ('h1', c_uint), + ('h2', c_uint), + ('h3', c_uint), + ('h4', c_uint), + ('Nl', c_uint), + ('Nh', c_uint), + ('data', c_uint * 16), + ('num', c_int), +] +assert sizeof(SHAstate_st) == 96, sizeof(SHAstate_st) +assert alignment(SHAstate_st) == 4, alignment(SHAstate_st) +SHA_CTX = SHAstate_st +class ssl_st(Structure): + pass +ssl_crock_st = POINTER(ssl_st) +class ssl_cipher_st(Structure): + pass +ssl_cipher_st._fields_ = [ + ('valid', c_int), + ('name', STRING), + ('id', c_ulong), + ('algorithms', c_ulong), + ('algo_strength', c_ulong), + ('algorithm2', c_ulong), + ('strength_bits', c_int), + ('alg_bits', c_int), + ('mask', c_ulong), + ('mask_strength', c_ulong), +] +assert sizeof(ssl_cipher_st) == 40, sizeof(ssl_cipher_st) +assert alignment(ssl_cipher_st) == 4, alignment(ssl_cipher_st) +SSL_CIPHER = ssl_cipher_st +SSL = ssl_st +class ssl_ctx_st(Structure): + pass +SSL_CTX = ssl_ctx_st +class ssl_method_st(Structure): + pass +class ssl3_enc_method(Structure): + pass +ssl_method_st._fields_ = [ + ('version', c_int), + ('ssl_new', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_clear', CFUNCTYPE(None, POINTER(SSL))), + ('ssl_free', CFUNCTYPE(None, POINTER(SSL))), + ('ssl_accept', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_connect', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_read', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)), + ('ssl_peek', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)), + ('ssl_write', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)), + ('ssl_shutdown', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_renegotiate', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_renegotiate_check', CFUNCTYPE(c_int, POINTER(SSL))), + ('ssl_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, c_long, c_void_p)), + ('ssl_ctx_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, c_long, c_void_p)), + ('get_cipher_by_char', CFUNCTYPE(POINTER(SSL_CIPHER), POINTER(c_ubyte))), + ('put_cipher_by_char', CFUNCTYPE(c_int, POINTER(SSL_CIPHER), POINTER(c_ubyte))), + ('ssl_pending', CFUNCTYPE(c_int, POINTER(SSL))), + ('num_ciphers', CFUNCTYPE(c_int)), + ('get_cipher', CFUNCTYPE(POINTER(SSL_CIPHER), c_uint)), + ('get_ssl_method', CFUNCTYPE(POINTER(ssl_method_st), c_int)), + ('get_timeout', CFUNCTYPE(c_long)), + ('ssl3_enc', POINTER(ssl3_enc_method)), + ('ssl_version', CFUNCTYPE(c_int)), + ('ssl_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, CFUNCTYPE(None))), + ('ssl_ctx_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), 
c_int, CFUNCTYPE(None))), +] +assert sizeof(ssl_method_st) == 100, sizeof(ssl_method_st) +assert alignment(ssl_method_st) == 4, alignment(ssl_method_st) +ssl3_enc_method._fields_ = [ +] +SSL_METHOD = ssl_method_st +class ssl_session_st(Structure): + pass +class sess_cert_st(Structure): + pass +ssl_session_st._fields_ = [ + ('ssl_version', c_int), + ('key_arg_length', c_uint), + ('key_arg', c_ubyte * 8), + ('master_key_length', c_int), + ('master_key', c_ubyte * 48), + ('session_id_length', c_uint), + ('session_id', c_ubyte * 32), + ('sid_ctx_length', c_uint), + ('sid_ctx', c_ubyte * 32), + ('not_resumable', c_int), + ('sess_cert', POINTER(sess_cert_st)), + ('peer', POINTER(X509)), + ('verify_result', c_long), + ('references', c_int), + ('timeout', c_long), + ('time', c_long), + ('compress_meth', c_int), + ('cipher', POINTER(SSL_CIPHER)), + ('cipher_id', c_ulong), + ('ciphers', POINTER(STACK)), + ('ex_data', CRYPTO_EX_DATA), + ('prev', POINTER(ssl_session_st)), + ('next', POINTER(ssl_session_st)), +] +assert sizeof(ssl_session_st) == 200, sizeof(ssl_session_st) +assert alignment(ssl_session_st) == 4, alignment(ssl_session_st) +sess_cert_st._fields_ = [ +] +SSL_SESSION = ssl_session_st +GEN_SESSION_CB = CFUNCTYPE(c_int, POINTER(SSL), POINTER(c_ubyte), POINTER(c_uint)) +class ssl_comp_st(Structure): + pass +ssl_comp_st._fields_ = [ + ('id', c_int), + ('name', STRING), + ('method', POINTER(COMP_METHOD)), +] +assert sizeof(ssl_comp_st) == 12, sizeof(ssl_comp_st) +assert alignment(ssl_comp_st) == 4, alignment(ssl_comp_st) +SSL_COMP = ssl_comp_st +class N10ssl_ctx_st4DOLLAR_18E(Structure): + pass +N10ssl_ctx_st4DOLLAR_18E._fields_ = [ + ('sess_connect', c_int), + ('sess_connect_renegotiate', c_int), + ('sess_connect_good', c_int), + ('sess_accept', c_int), + ('sess_accept_renegotiate', c_int), + ('sess_accept_good', c_int), + ('sess_miss', c_int), + ('sess_timeout', c_int), + ('sess_cache_full', c_int), + ('sess_hit', c_int), + ('sess_cb_hit', c_int), +] +assert sizeof(N10ssl_ctx_st4DOLLAR_18E) == 44, sizeof(N10ssl_ctx_st4DOLLAR_18E) +assert alignment(N10ssl_ctx_st4DOLLAR_18E) == 4, alignment(N10ssl_ctx_st4DOLLAR_18E) +class cert_st(Structure): + pass +ssl_ctx_st._fields_ = [ + ('method', POINTER(SSL_METHOD)), + ('cipher_list', POINTER(STACK)), + ('cipher_list_by_id', POINTER(STACK)), + ('cert_store', POINTER(x509_store_st)), + ('sessions', POINTER(lhash_st)), + ('session_cache_size', c_ulong), + ('session_cache_head', POINTER(ssl_session_st)), + ('session_cache_tail', POINTER(ssl_session_st)), + ('session_cache_mode', c_int), + ('session_timeout', c_long), + ('new_session_cb', CFUNCTYPE(c_int, POINTER(ssl_st), POINTER(SSL_SESSION))), + ('remove_session_cb', CFUNCTYPE(None, POINTER(ssl_ctx_st), POINTER(SSL_SESSION))), + ('get_session_cb', CFUNCTYPE(POINTER(SSL_SESSION), POINTER(ssl_st), POINTER(c_ubyte), c_int, POINTER(c_int))), + ('stats', N10ssl_ctx_st4DOLLAR_18E), + ('references', c_int), + ('app_verify_callback', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), c_void_p)), + ('app_verify_arg', c_void_p), + ('default_passwd_callback', POINTER(pem_password_cb)), + ('default_passwd_callback_userdata', c_void_p), + ('client_cert_cb', CFUNCTYPE(c_int, POINTER(SSL), POINTER(POINTER(X509)), POINTER(POINTER(EVP_PKEY)))), + ('ex_data', CRYPTO_EX_DATA), + ('rsa_md5', POINTER(EVP_MD)), + ('md5', POINTER(EVP_MD)), + ('sha1', POINTER(EVP_MD)), + ('extra_certs', POINTER(STACK)), + ('comp_methods', POINTER(STACK)), + ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)), + ('client_CA', POINTER(STACK)), + 
('options', c_ulong), + ('mode', c_ulong), + ('max_cert_list', c_long), + ('cert', POINTER(cert_st)), + ('read_ahead', c_int), + ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)), + ('msg_callback_arg', c_void_p), + ('verify_mode', c_int), + ('verify_depth', c_int), + ('sid_ctx_length', c_uint), + ('sid_ctx', c_ubyte * 32), + ('default_verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), + ('generate_session_id', GEN_SESSION_CB), + ('purpose', c_int), + ('trust', c_int), + ('quiet_shutdown', c_int), +] +assert sizeof(ssl_ctx_st) == 248, sizeof(ssl_ctx_st) +assert alignment(ssl_ctx_st) == 4, alignment(ssl_ctx_st) +cert_st._fields_ = [ +] +class ssl2_state_st(Structure): + pass +class ssl3_state_st(Structure): + pass +ssl_st._fields_ = [ + ('version', c_int), + ('type', c_int), + ('method', POINTER(SSL_METHOD)), + ('rbio', POINTER(BIO)), + ('wbio', POINTER(BIO)), + ('bbio', POINTER(BIO)), + ('rwstate', c_int), + ('in_handshake', c_int), + ('handshake_func', CFUNCTYPE(c_int)), + ('server', c_int), + ('new_session', c_int), + ('quiet_shutdown', c_int), + ('shutdown', c_int), + ('state', c_int), + ('rstate', c_int), + ('init_buf', POINTER(BUF_MEM)), + ('init_msg', c_void_p), + ('init_num', c_int), + ('init_off', c_int), + ('packet', POINTER(c_ubyte)), + ('packet_length', c_uint), + ('s2', POINTER(ssl2_state_st)), + ('s3', POINTER(ssl3_state_st)), + ('read_ahead', c_int), + ('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)), + ('msg_callback_arg', c_void_p), + ('hit', c_int), + ('purpose', c_int), + ('trust', c_int), + ('cipher_list', POINTER(STACK)), + ('cipher_list_by_id', POINTER(STACK)), + ('enc_read_ctx', POINTER(EVP_CIPHER_CTX)), + ('read_hash', POINTER(EVP_MD)), + ('expand', POINTER(COMP_CTX)), + ('enc_write_ctx', POINTER(EVP_CIPHER_CTX)), + ('write_hash', POINTER(EVP_MD)), + ('compress', POINTER(COMP_CTX)), + ('cert', POINTER(cert_st)), + ('sid_ctx_length', c_uint), + ('sid_ctx', c_ubyte * 32), + ('session', POINTER(SSL_SESSION)), + ('generate_session_id', GEN_SESSION_CB), + ('verify_mode', c_int), + ('verify_depth', c_int), + ('verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), + ('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)), + ('error', c_int), + ('error_code', c_int), + ('ctx', POINTER(SSL_CTX)), + ('debug', c_int), + ('verify_result', c_long), + ('ex_data', CRYPTO_EX_DATA), + ('client_CA', POINTER(STACK)), + ('references', c_int), + ('options', c_ulong), + ('mode', c_ulong), + ('max_cert_list', c_long), + ('first_packet', c_int), + ('client_version', c_int), +] +assert sizeof(ssl_st) == 268, sizeof(ssl_st) +assert alignment(ssl_st) == 4, alignment(ssl_st) +class N13ssl2_state_st4DOLLAR_19E(Structure): + pass +N13ssl2_state_st4DOLLAR_19E._fields_ = [ + ('conn_id_length', c_uint), + ('cert_type', c_uint), + ('cert_length', c_uint), + ('csl', c_uint), + ('clear', c_uint), + ('enc', c_uint), + ('ccl', c_ubyte * 32), + ('cipher_spec_length', c_uint), + ('session_id_length', c_uint), + ('clen', c_uint), + ('rlen', c_uint), +] +assert sizeof(N13ssl2_state_st4DOLLAR_19E) == 72, sizeof(N13ssl2_state_st4DOLLAR_19E) +assert alignment(N13ssl2_state_st4DOLLAR_19E) == 4, alignment(N13ssl2_state_st4DOLLAR_19E) +ssl2_state_st._fields_ = [ + ('three_byte_header', c_int), + ('clear_text', c_int), + ('escape', c_int), + ('ssl2_rollback', c_int), + ('wnum', c_uint), + ('wpend_tot', c_int), + ('wpend_buf', POINTER(c_ubyte)), + ('wpend_off', c_int), + 
('wpend_len', c_int), + ('wpend_ret', c_int), + ('rbuf_left', c_int), + ('rbuf_offs', c_int), + ('rbuf', POINTER(c_ubyte)), + ('wbuf', POINTER(c_ubyte)), + ('write_ptr', POINTER(c_ubyte)), + ('padding', c_uint), + ('rlength', c_uint), + ('ract_data_length', c_int), + ('wlength', c_uint), + ('wact_data_length', c_int), + ('ract_data', POINTER(c_ubyte)), + ('wact_data', POINTER(c_ubyte)), + ('mac_data', POINTER(c_ubyte)), + ('read_key', POINTER(c_ubyte)), + ('write_key', POINTER(c_ubyte)), + ('challenge_length', c_uint), + ('challenge', c_ubyte * 32), + ('conn_id_length', c_uint), + ('conn_id', c_ubyte * 16), + ('key_material_length', c_uint), + ('key_material', c_ubyte * 48), + ('read_sequence', c_ulong), + ('write_sequence', c_ulong), + ('tmp', N13ssl2_state_st4DOLLAR_19E), +] +assert sizeof(ssl2_state_st) == 288, sizeof(ssl2_state_st) +assert alignment(ssl2_state_st) == 4, alignment(ssl2_state_st) +SSL2_STATE = ssl2_state_st +class ssl3_record_st(Structure): + pass +ssl3_record_st._fields_ = [ + ('type', c_int), + ('length', c_uint), + ('off', c_uint), + ('data', POINTER(c_ubyte)), + ('input', POINTER(c_ubyte)), + ('comp', POINTER(c_ubyte)), +] +assert sizeof(ssl3_record_st) == 24, sizeof(ssl3_record_st) +assert alignment(ssl3_record_st) == 4, alignment(ssl3_record_st) +SSL3_RECORD = ssl3_record_st +class ssl3_buffer_st(Structure): + pass +size_t = __darwin_size_t +ssl3_buffer_st._fields_ = [ + ('buf', POINTER(c_ubyte)), + ('len', size_t), + ('offset', c_int), + ('left', c_int), +] +assert sizeof(ssl3_buffer_st) == 16, sizeof(ssl3_buffer_st) +assert alignment(ssl3_buffer_st) == 4, alignment(ssl3_buffer_st) +SSL3_BUFFER = ssl3_buffer_st +class N13ssl3_state_st4DOLLAR_20E(Structure): + pass +N13ssl3_state_st4DOLLAR_20E._fields_ = [ + ('cert_verify_md', c_ubyte * 72), + ('finish_md', c_ubyte * 72), + ('finish_md_len', c_int), + ('peer_finish_md', c_ubyte * 72), + ('peer_finish_md_len', c_int), + ('message_size', c_ulong), + ('message_type', c_int), + ('new_cipher', POINTER(SSL_CIPHER)), + ('dh', POINTER(DH)), + ('next_state', c_int), + ('reuse_message', c_int), + ('cert_req', c_int), + ('ctype_num', c_int), + ('ctype', c_char * 7), + ('ca_names', POINTER(STACK)), + ('use_rsa_tmp', c_int), + ('key_block_length', c_int), + ('key_block', POINTER(c_ubyte)), + ('new_sym_enc', POINTER(EVP_CIPHER)), + ('new_hash', POINTER(EVP_MD)), + ('new_compression', POINTER(SSL_COMP)), + ('cert_request', c_int), +] +assert sizeof(N13ssl3_state_st4DOLLAR_20E) == 296, sizeof(N13ssl3_state_st4DOLLAR_20E) +assert alignment(N13ssl3_state_st4DOLLAR_20E) == 4, alignment(N13ssl3_state_st4DOLLAR_20E) +ssl3_state_st._fields_ = [ + ('flags', c_long), + ('delay_buf_pop_ret', c_int), + ('read_sequence', c_ubyte * 8), + ('read_mac_secret', c_ubyte * 36), + ('write_sequence', c_ubyte * 8), + ('write_mac_secret', c_ubyte * 36), + ('server_random', c_ubyte * 32), + ('client_random', c_ubyte * 32), + ('need_empty_fragments', c_int), + ('empty_fragment_done', c_int), + ('rbuf', SSL3_BUFFER), + ('wbuf', SSL3_BUFFER), + ('rrec', SSL3_RECORD), + ('wrec', SSL3_RECORD), + ('alert_fragment', c_ubyte * 2), + ('alert_fragment_len', c_uint), + ('handshake_fragment', c_ubyte * 4), + ('handshake_fragment_len', c_uint), + ('wnum', c_uint), + ('wpend_tot', c_int), + ('wpend_type', c_int), + ('wpend_ret', c_int), + ('wpend_buf', POINTER(c_ubyte)), + ('finish_dgst1', EVP_MD_CTX), + ('finish_dgst2', EVP_MD_CTX), + ('change_cipher_spec', c_int), + ('warn_alert', c_int), + ('fatal_alert', c_int), + ('alert_dispatch', c_int), + ('send_alert', 
c_ubyte * 2), + ('renegotiate', c_int), + ('total_renegotiations', c_int), + ('num_renegotiations', c_int), + ('in_read_app_data', c_int), + ('tmp', N13ssl3_state_st4DOLLAR_20E), +] +assert sizeof(ssl3_state_st) == 648, sizeof(ssl3_state_st) +assert alignment(ssl3_state_st) == 4, alignment(ssl3_state_st) +SSL3_STATE = ssl3_state_st +stack_st._fields_ = [ + ('num', c_int), + ('data', POINTER(STRING)), + ('sorted', c_int), + ('num_alloc', c_int), + ('comp', CFUNCTYPE(c_int, POINTER(STRING), POINTER(STRING))), +] +assert sizeof(stack_st) == 20, sizeof(stack_st) +assert alignment(stack_st) == 4, alignment(stack_st) +class ui_st(Structure): + pass +ui_st._fields_ = [ +] +UI = ui_st +class ui_method_st(Structure): + pass +ui_method_st._fields_ = [ +] +UI_METHOD = ui_method_st +class ui_string_st(Structure): + pass +ui_string_st._fields_ = [ +] +UI_STRING = ui_string_st + +# values for enumeration 'UI_string_types' +UI_string_types = c_int # enum +class X509_objects_st(Structure): + pass +X509_objects_st._fields_ = [ + ('nid', c_int), + ('a2i', CFUNCTYPE(c_int)), + ('i2a', CFUNCTYPE(c_int)), +] +assert sizeof(X509_objects_st) == 12, sizeof(X509_objects_st) +assert alignment(X509_objects_st) == 4, alignment(X509_objects_st) +X509_OBJECTS = X509_objects_st +X509_algor_st._fields_ = [ + ('algorithm', POINTER(ASN1_OBJECT)), + ('parameter', POINTER(ASN1_TYPE)), +] +assert sizeof(X509_algor_st) == 8, sizeof(X509_algor_st) +assert alignment(X509_algor_st) == 4, alignment(X509_algor_st) +class X509_val_st(Structure): + pass +X509_val_st._fields_ = [ + ('notBefore', POINTER(ASN1_TIME)), + ('notAfter', POINTER(ASN1_TIME)), +] +assert sizeof(X509_val_st) == 8, sizeof(X509_val_st) +assert alignment(X509_val_st) == 4, alignment(X509_val_st) +X509_VAL = X509_val_st +class X509_pubkey_st(Structure): + pass +X509_pubkey_st._fields_ = [ + ('algor', POINTER(X509_ALGOR)), + ('public_key', POINTER(ASN1_BIT_STRING)), + ('pkey', POINTER(EVP_PKEY)), +] +assert sizeof(X509_pubkey_st) == 12, sizeof(X509_pubkey_st) +assert alignment(X509_pubkey_st) == 4, alignment(X509_pubkey_st) +X509_PUBKEY = X509_pubkey_st +class X509_sig_st(Structure): + pass +X509_sig_st._fields_ = [ + ('algor', POINTER(X509_ALGOR)), + ('digest', POINTER(ASN1_OCTET_STRING)), +] +assert sizeof(X509_sig_st) == 8, sizeof(X509_sig_st) +assert alignment(X509_sig_st) == 4, alignment(X509_sig_st) +X509_SIG = X509_sig_st +class X509_name_entry_st(Structure): + pass +X509_name_entry_st._fields_ = [ + ('object', POINTER(ASN1_OBJECT)), + ('value', POINTER(ASN1_STRING)), + ('set', c_int), + ('size', c_int), +] +assert sizeof(X509_name_entry_st) == 16, sizeof(X509_name_entry_st) +assert alignment(X509_name_entry_st) == 4, alignment(X509_name_entry_st) +X509_NAME_ENTRY = X509_name_entry_st +X509_name_st._fields_ = [ + ('entries', POINTER(STACK)), + ('modified', c_int), + ('bytes', POINTER(BUF_MEM)), + ('hash', c_ulong), +] +assert sizeof(X509_name_st) == 16, sizeof(X509_name_st) +assert alignment(X509_name_st) == 4, alignment(X509_name_st) +class X509_extension_st(Structure): + pass +X509_extension_st._fields_ = [ + ('object', POINTER(ASN1_OBJECT)), + ('critical', ASN1_BOOLEAN), + ('value', POINTER(ASN1_OCTET_STRING)), +] +assert sizeof(X509_extension_st) == 12, sizeof(X509_extension_st) +assert alignment(X509_extension_st) == 4, alignment(X509_extension_st) +X509_EXTENSION = X509_extension_st +class x509_attributes_st(Structure): + pass +class N18x509_attributes_st4DOLLAR_13E(Union): + pass +N18x509_attributes_st4DOLLAR_13E._fields_ = [ + ('ptr', STRING), + 
('set', POINTER(STACK)), + ('single', POINTER(ASN1_TYPE)), +] +assert sizeof(N18x509_attributes_st4DOLLAR_13E) == 4, sizeof(N18x509_attributes_st4DOLLAR_13E) +assert alignment(N18x509_attributes_st4DOLLAR_13E) == 4, alignment(N18x509_attributes_st4DOLLAR_13E) +x509_attributes_st._fields_ = [ + ('object', POINTER(ASN1_OBJECT)), + ('single', c_int), + ('value', N18x509_attributes_st4DOLLAR_13E), +] +assert sizeof(x509_attributes_st) == 12, sizeof(x509_attributes_st) +assert alignment(x509_attributes_st) == 4, alignment(x509_attributes_st) +X509_ATTRIBUTE = x509_attributes_st +class X509_req_info_st(Structure): + pass +X509_req_info_st._fields_ = [ + ('enc', ASN1_ENCODING), + ('version', POINTER(ASN1_INTEGER)), + ('subject', POINTER(X509_NAME)), + ('pubkey', POINTER(X509_PUBKEY)), + ('attributes', POINTER(STACK)), +] +assert sizeof(X509_req_info_st) == 28, sizeof(X509_req_info_st) +assert alignment(X509_req_info_st) == 4, alignment(X509_req_info_st) +X509_REQ_INFO = X509_req_info_st +class X509_req_st(Structure): + pass +X509_req_st._fields_ = [ + ('req_info', POINTER(X509_REQ_INFO)), + ('sig_alg', POINTER(X509_ALGOR)), + ('signature', POINTER(ASN1_BIT_STRING)), + ('references', c_int), +] +assert sizeof(X509_req_st) == 16, sizeof(X509_req_st) +assert alignment(X509_req_st) == 4, alignment(X509_req_st) +X509_REQ = X509_req_st +class x509_cinf_st(Structure): + pass +x509_cinf_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('serialNumber', POINTER(ASN1_INTEGER)), + ('signature', POINTER(X509_ALGOR)), + ('issuer', POINTER(X509_NAME)), + ('validity', POINTER(X509_VAL)), + ('subject', POINTER(X509_NAME)), + ('key', POINTER(X509_PUBKEY)), + ('issuerUID', POINTER(ASN1_BIT_STRING)), + ('subjectUID', POINTER(ASN1_BIT_STRING)), + ('extensions', POINTER(STACK)), +] +assert sizeof(x509_cinf_st) == 40, sizeof(x509_cinf_st) +assert alignment(x509_cinf_st) == 4, alignment(x509_cinf_st) +X509_CINF = x509_cinf_st +class x509_cert_aux_st(Structure): + pass +x509_cert_aux_st._fields_ = [ + ('trust', POINTER(STACK)), + ('reject', POINTER(STACK)), + ('alias', POINTER(ASN1_UTF8STRING)), + ('keyid', POINTER(ASN1_OCTET_STRING)), + ('other', POINTER(STACK)), +] +assert sizeof(x509_cert_aux_st) == 20, sizeof(x509_cert_aux_st) +assert alignment(x509_cert_aux_st) == 4, alignment(x509_cert_aux_st) +X509_CERT_AUX = x509_cert_aux_st +class AUTHORITY_KEYID_st(Structure): + pass +x509_st._fields_ = [ + ('cert_info', POINTER(X509_CINF)), + ('sig_alg', POINTER(X509_ALGOR)), + ('signature', POINTER(ASN1_BIT_STRING)), + ('valid', c_int), + ('references', c_int), + ('name', STRING), + ('ex_data', CRYPTO_EX_DATA), + ('ex_pathlen', c_long), + ('ex_flags', c_ulong), + ('ex_kusage', c_ulong), + ('ex_xkusage', c_ulong), + ('ex_nscert', c_ulong), + ('skid', POINTER(ASN1_OCTET_STRING)), + ('akid', POINTER(AUTHORITY_KEYID_st)), + ('sha1_hash', c_ubyte * 20), + ('aux', POINTER(X509_CERT_AUX)), +] +assert sizeof(x509_st) == 84, sizeof(x509_st) +assert alignment(x509_st) == 4, alignment(x509_st) +AUTHORITY_KEYID_st._fields_ = [ +] +class x509_trust_st(Structure): + pass +x509_trust_st._fields_ = [ + ('trust', c_int), + ('flags', c_int), + ('check_trust', CFUNCTYPE(c_int, POINTER(x509_trust_st), POINTER(X509), c_int)), + ('name', STRING), + ('arg1', c_int), + ('arg2', c_void_p), +] +assert sizeof(x509_trust_st) == 24, sizeof(x509_trust_st) +assert alignment(x509_trust_st) == 4, alignment(x509_trust_st) +X509_TRUST = x509_trust_st +class X509_revoked_st(Structure): + pass +X509_revoked_st._fields_ = [ + ('serialNumber', 
POINTER(ASN1_INTEGER)), + ('revocationDate', POINTER(ASN1_TIME)), + ('extensions', POINTER(STACK)), + ('sequence', c_int), +] +assert sizeof(X509_revoked_st) == 16, sizeof(X509_revoked_st) +assert alignment(X509_revoked_st) == 4, alignment(X509_revoked_st) +X509_REVOKED = X509_revoked_st +class X509_crl_info_st(Structure): + pass +X509_crl_info_st._fields_ = [ + ('version', POINTER(ASN1_INTEGER)), + ('sig_alg', POINTER(X509_ALGOR)), + ('issuer', POINTER(X509_NAME)), + ('lastUpdate', POINTER(ASN1_TIME)), + ('nextUpdate', POINTER(ASN1_TIME)), + ('revoked', POINTER(STACK)), + ('extensions', POINTER(STACK)), + ('enc', ASN1_ENCODING), +] +assert sizeof(X509_crl_info_st) == 40, sizeof(X509_crl_info_st) +assert alignment(X509_crl_info_st) == 4, alignment(X509_crl_info_st) +X509_CRL_INFO = X509_crl_info_st +X509_crl_st._fields_ = [ + ('crl', POINTER(X509_CRL_INFO)), + ('sig_alg', POINTER(X509_ALGOR)), + ('signature', POINTER(ASN1_BIT_STRING)), + ('references', c_int), +] +assert sizeof(X509_crl_st) == 16, sizeof(X509_crl_st) +assert alignment(X509_crl_st) == 4, alignment(X509_crl_st) +class private_key_st(Structure): + pass +private_key_st._fields_ = [ + ('version', c_int), + ('enc_algor', POINTER(X509_ALGOR)), + ('enc_pkey', POINTER(ASN1_OCTET_STRING)), + ('dec_pkey', POINTER(EVP_PKEY)), + ('key_length', c_int), + ('key_data', STRING), + ('key_free', c_int), + ('cipher', EVP_CIPHER_INFO), + ('references', c_int), +] +assert sizeof(private_key_st) == 52, sizeof(private_key_st) +assert alignment(private_key_st) == 4, alignment(private_key_st) +X509_PKEY = private_key_st +class X509_info_st(Structure): + pass +X509_info_st._fields_ = [ + ('x509', POINTER(X509)), + ('crl', POINTER(X509_CRL)), + ('x_pkey', POINTER(X509_PKEY)), + ('enc_cipher', EVP_CIPHER_INFO), + ('enc_len', c_int), + ('enc_data', STRING), + ('references', c_int), +] +assert sizeof(X509_info_st) == 44, sizeof(X509_info_st) +assert alignment(X509_info_st) == 4, alignment(X509_info_st) +X509_INFO = X509_info_st +class Netscape_spkac_st(Structure): + pass +Netscape_spkac_st._fields_ = [ + ('pubkey', POINTER(X509_PUBKEY)), + ('challenge', POINTER(ASN1_IA5STRING)), +] +assert sizeof(Netscape_spkac_st) == 8, sizeof(Netscape_spkac_st) +assert alignment(Netscape_spkac_st) == 4, alignment(Netscape_spkac_st) +NETSCAPE_SPKAC = Netscape_spkac_st +class Netscape_spki_st(Structure): + pass +Netscape_spki_st._fields_ = [ + ('spkac', POINTER(NETSCAPE_SPKAC)), + ('sig_algor', POINTER(X509_ALGOR)), + ('signature', POINTER(ASN1_BIT_STRING)), +] +assert sizeof(Netscape_spki_st) == 12, sizeof(Netscape_spki_st) +assert alignment(Netscape_spki_st) == 4, alignment(Netscape_spki_st) +NETSCAPE_SPKI = Netscape_spki_st +class Netscape_certificate_sequence(Structure): + pass +Netscape_certificate_sequence._fields_ = [ + ('type', POINTER(ASN1_OBJECT)), + ('certs', POINTER(STACK)), +] +assert sizeof(Netscape_certificate_sequence) == 8, sizeof(Netscape_certificate_sequence) +assert alignment(Netscape_certificate_sequence) == 4, alignment(Netscape_certificate_sequence) +NETSCAPE_CERT_SEQUENCE = Netscape_certificate_sequence +class PBEPARAM_st(Structure): + pass +PBEPARAM_st._fields_ = [ + ('salt', POINTER(ASN1_OCTET_STRING)), + ('iter', POINTER(ASN1_INTEGER)), +] +assert sizeof(PBEPARAM_st) == 8, sizeof(PBEPARAM_st) +assert alignment(PBEPARAM_st) == 4, alignment(PBEPARAM_st) +PBEPARAM = PBEPARAM_st +class PBE2PARAM_st(Structure): + pass +PBE2PARAM_st._fields_ = [ + ('keyfunc', POINTER(X509_ALGOR)), + ('encryption', POINTER(X509_ALGOR)), +] +assert 
sizeof(PBE2PARAM_st) == 8, sizeof(PBE2PARAM_st) +assert alignment(PBE2PARAM_st) == 4, alignment(PBE2PARAM_st) +PBE2PARAM = PBE2PARAM_st +class PBKDF2PARAM_st(Structure): + pass +PBKDF2PARAM_st._fields_ = [ + ('salt', POINTER(ASN1_TYPE)), + ('iter', POINTER(ASN1_INTEGER)), + ('keylength', POINTER(ASN1_INTEGER)), + ('prf', POINTER(X509_ALGOR)), +] +assert sizeof(PBKDF2PARAM_st) == 16, sizeof(PBKDF2PARAM_st) +assert alignment(PBKDF2PARAM_st) == 4, alignment(PBKDF2PARAM_st) +PBKDF2PARAM = PBKDF2PARAM_st +class pkcs8_priv_key_info_st(Structure): + pass +pkcs8_priv_key_info_st._fields_ = [ + ('broken', c_int), + ('version', POINTER(ASN1_INTEGER)), + ('pkeyalg', POINTER(X509_ALGOR)), + ('pkey', POINTER(ASN1_TYPE)), + ('attributes', POINTER(STACK)), +] +assert sizeof(pkcs8_priv_key_info_st) == 20, sizeof(pkcs8_priv_key_info_st) +assert alignment(pkcs8_priv_key_info_st) == 4, alignment(pkcs8_priv_key_info_st) +PKCS8_PRIV_KEY_INFO = pkcs8_priv_key_info_st +class x509_hash_dir_st(Structure): + pass +x509_hash_dir_st._fields_ = [ + ('num_dirs', c_int), + ('dirs', POINTER(STRING)), + ('dirs_type', POINTER(c_int)), + ('num_dirs_alloced', c_int), +] +assert sizeof(x509_hash_dir_st) == 16, sizeof(x509_hash_dir_st) +assert alignment(x509_hash_dir_st) == 4, alignment(x509_hash_dir_st) +X509_HASH_DIR_CTX = x509_hash_dir_st +class x509_file_st(Structure): + pass +x509_file_st._fields_ = [ + ('num_paths', c_int), + ('num_alloced', c_int), + ('paths', POINTER(STRING)), + ('path_type', POINTER(c_int)), +] +assert sizeof(x509_file_st) == 16, sizeof(x509_file_st) +assert alignment(x509_file_st) == 4, alignment(x509_file_st) +X509_CERT_FILE_CTX = x509_file_st +class x509_object_st(Structure): + pass +class N14x509_object_st4DOLLAR_14E(Union): + pass +N14x509_object_st4DOLLAR_14E._fields_ = [ + ('ptr', STRING), + ('x509', POINTER(X509)), + ('crl', POINTER(X509_CRL)), + ('pkey', POINTER(EVP_PKEY)), +] +assert sizeof(N14x509_object_st4DOLLAR_14E) == 4, sizeof(N14x509_object_st4DOLLAR_14E) +assert alignment(N14x509_object_st4DOLLAR_14E) == 4, alignment(N14x509_object_st4DOLLAR_14E) +x509_object_st._fields_ = [ + ('type', c_int), + ('data', N14x509_object_st4DOLLAR_14E), +] +assert sizeof(x509_object_st) == 8, sizeof(x509_object_st) +assert alignment(x509_object_st) == 4, alignment(x509_object_st) +X509_OBJECT = x509_object_st +class x509_lookup_st(Structure): + pass +X509_LOOKUP = x509_lookup_st +class x509_lookup_method_st(Structure): + pass +x509_lookup_method_st._fields_ = [ + ('name', STRING), + ('new_item', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), + ('free', CFUNCTYPE(None, POINTER(X509_LOOKUP))), + ('init', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), + ('shutdown', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))), + ('ctrl', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_long, POINTER(STRING))), + ('get_by_subject', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(X509_OBJECT))), + ('get_by_issuer_serial', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(ASN1_INTEGER), POINTER(X509_OBJECT))), + ('get_by_fingerprint', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(c_ubyte), c_int, POINTER(X509_OBJECT))), + ('get_by_alias', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_int, POINTER(X509_OBJECT))), +] +assert sizeof(x509_lookup_method_st) == 40, sizeof(x509_lookup_method_st) +assert alignment(x509_lookup_method_st) == 4, alignment(x509_lookup_method_st) +X509_LOOKUP_METHOD = x509_lookup_method_st +x509_store_st._fields_ = [ + ('cache', c_int), + 
('objs', POINTER(STACK)), + ('get_cert_methods', POINTER(STACK)), + ('flags', c_ulong), + ('purpose', c_int), + ('trust', c_int), + ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), + ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))), + ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))), + ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))), + ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))), + ('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))), + ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('ex_data', CRYPTO_EX_DATA), + ('references', c_int), + ('depth', c_int), +] +assert sizeof(x509_store_st) == 76, sizeof(x509_store_st) +assert alignment(x509_store_st) == 4, alignment(x509_store_st) +x509_lookup_st._fields_ = [ + ('init', c_int), + ('skip', c_int), + ('method', POINTER(X509_LOOKUP_METHOD)), + ('method_data', STRING), + ('store_ctx', POINTER(X509_STORE)), +] +assert sizeof(x509_lookup_st) == 20, sizeof(x509_lookup_st) +assert alignment(x509_lookup_st) == 4, alignment(x509_lookup_st) +time_t = __darwin_time_t +x509_store_ctx_st._fields_ = [ + ('ctx', POINTER(X509_STORE)), + ('current_method', c_int), + ('cert', POINTER(X509)), + ('untrusted', POINTER(STACK)), + ('purpose', c_int), + ('trust', c_int), + ('check_time', time_t), + ('flags', c_ulong), + ('other_ctx', c_void_p), + ('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))), + ('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))), + ('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))), + ('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))), + ('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))), + ('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))), + ('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))), + ('depth', c_int), + ('valid', c_int), + ('last_untrusted', c_int), + ('chain', POINTER(STACK)), + ('error_depth', c_int), + ('error', c_int), + ('current_cert', POINTER(X509)), + ('current_issuer', POINTER(X509)), + ('current_crl', POINTER(X509_CRL)), + ('ex_data', CRYPTO_EX_DATA), +] +assert sizeof(x509_store_ctx_st) == 116, sizeof(x509_store_ctx_st) +assert alignment(x509_store_ctx_st) == 4, alignment(x509_store_ctx_st) +va_list = __darwin_va_list +__darwin_off_t = __int64_t +fpos_t = __darwin_off_t +class __sbuf(Structure): + pass +__sbuf._fields_ = [ + ('_base', POINTER(c_ubyte)), + ('_size', c_int), +] +assert sizeof(__sbuf) == 8, sizeof(__sbuf) +assert alignment(__sbuf) == 4, alignment(__sbuf) +class __sFILEX(Structure): + pass +__sFILEX._fields_ = [ +] +class __sFILE(Structure): + pass +__sFILE._pack_ = 4 +__sFILE._fields_ = [ + ('_p', POINTER(c_ubyte)), + ('_r', c_int), + ('_w', c_int), + ('_flags', c_short), + ('_file', c_short), + ('_bf', __sbuf), + ('_lbfsize', c_int), + ('_cookie', c_void_p), + ('_close', CFUNCTYPE(c_int, c_void_p)), + ('_read', CFUNCTYPE(c_int, c_void_p, STRING, c_int)), + ('_seek', CFUNCTYPE(fpos_t, c_void_p, c_longlong, c_int)), + 
('_write', CFUNCTYPE(c_int, c_void_p, STRING, c_int)), + ('_ub', __sbuf), + ('_extra', POINTER(__sFILEX)), + ('_ur', c_int), + ('_ubuf', c_ubyte * 3), + ('_nbuf', c_ubyte * 1), + ('_lb', __sbuf), + ('_blksize', c_int), + ('_offset', fpos_t), +] +assert sizeof(__sFILE) == 88, sizeof(__sFILE) +assert alignment(__sFILE) == 4, alignment(__sFILE) +FILE = __sFILE +ct_rune_t = __darwin_ct_rune_t +rune_t = __darwin_rune_t +class div_t(Structure): + pass +div_t._fields_ = [ + ('quot', c_int), + ('rem', c_int), +] +assert sizeof(div_t) == 8, sizeof(div_t) +assert alignment(div_t) == 4, alignment(div_t) +class ldiv_t(Structure): + pass +ldiv_t._fields_ = [ + ('quot', c_long), + ('rem', c_long), +] +assert sizeof(ldiv_t) == 8, sizeof(ldiv_t) +assert alignment(ldiv_t) == 4, alignment(ldiv_t) +class lldiv_t(Structure): + pass +lldiv_t._pack_ = 4 +lldiv_t._fields_ = [ + ('quot', c_longlong), + ('rem', c_longlong), +] +assert sizeof(lldiv_t) == 16, sizeof(lldiv_t) +assert alignment(lldiv_t) == 4, alignment(lldiv_t) +__darwin_dev_t = __int32_t +dev_t = __darwin_dev_t +__darwin_mode_t = __uint16_t +mode_t = __darwin_mode_t +class mcontext(Structure): + pass +mcontext._fields_ = [ +] +class mcontext64(Structure): + pass +mcontext64._fields_ = [ +] +class __darwin_pthread_handler_rec(Structure): + pass +__darwin_pthread_handler_rec._fields_ = [ + ('__routine', CFUNCTYPE(None, c_void_p)), + ('__arg', c_void_p), + ('__next', POINTER(__darwin_pthread_handler_rec)), +] +assert sizeof(__darwin_pthread_handler_rec) == 12, sizeof(__darwin_pthread_handler_rec) +assert alignment(__darwin_pthread_handler_rec) == 4, alignment(__darwin_pthread_handler_rec) +class _opaque_pthread_attr_t(Structure): + pass +_opaque_pthread_attr_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 36), +] +assert sizeof(_opaque_pthread_attr_t) == 40, sizeof(_opaque_pthread_attr_t) +assert alignment(_opaque_pthread_attr_t) == 4, alignment(_opaque_pthread_attr_t) +class _opaque_pthread_cond_t(Structure): + pass +_opaque_pthread_cond_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 24), +] +assert sizeof(_opaque_pthread_cond_t) == 28, sizeof(_opaque_pthread_cond_t) +assert alignment(_opaque_pthread_cond_t) == 4, alignment(_opaque_pthread_cond_t) +class _opaque_pthread_condattr_t(Structure): + pass +_opaque_pthread_condattr_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 4), +] +assert sizeof(_opaque_pthread_condattr_t) == 8, sizeof(_opaque_pthread_condattr_t) +assert alignment(_opaque_pthread_condattr_t) == 4, alignment(_opaque_pthread_condattr_t) +class _opaque_pthread_mutex_t(Structure): + pass +_opaque_pthread_mutex_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 40), +] +assert sizeof(_opaque_pthread_mutex_t) == 44, sizeof(_opaque_pthread_mutex_t) +assert alignment(_opaque_pthread_mutex_t) == 4, alignment(_opaque_pthread_mutex_t) +class _opaque_pthread_mutexattr_t(Structure): + pass +_opaque_pthread_mutexattr_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 8), +] +assert sizeof(_opaque_pthread_mutexattr_t) == 12, sizeof(_opaque_pthread_mutexattr_t) +assert alignment(_opaque_pthread_mutexattr_t) == 4, alignment(_opaque_pthread_mutexattr_t) +class _opaque_pthread_once_t(Structure): + pass +_opaque_pthread_once_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 4), +] +assert sizeof(_opaque_pthread_once_t) == 8, sizeof(_opaque_pthread_once_t) +assert alignment(_opaque_pthread_once_t) == 4, alignment(_opaque_pthread_once_t) +class _opaque_pthread_rwlock_t(Structure): + pass 
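The generated bindings above all follow the same ctypes idiom: declare an empty Structure subclass, assign _fields_ afterwards (so self-referential pointer members can be expressed), then assert the sizeof/alignment values recorded on the platform the code was generated for (a 32-bit build, judging by the 4-byte pointers). A minimal, platform-neutral sketch of that idiom, using made-up names rather than anything from this module:

from ctypes import POINTER, Structure, alignment, sizeof

class example_st(Structure):
    pass

# _fields_ is assigned after the class statement so the struct can refer to
# its own type, exactly as the generated OpenSSL wrappers do above.
example_st._fields_ = [
    ('next', POINTER(example_st)),
    ('prev', POINTER(example_st)),
]

# Layout sanity checks; unlike the hard-coded numbers in the generated file,
# these are expressed in terms of the current platform's pointer size.
assert sizeof(example_st) == 2 * sizeof(POINTER(example_st)), sizeof(example_st)
assert alignment(example_st) == alignment(POINTER(example_st)), alignment(example_st)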
+_opaque_pthread_rwlock_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 124), +] +assert sizeof(_opaque_pthread_rwlock_t) == 128, sizeof(_opaque_pthread_rwlock_t) +assert alignment(_opaque_pthread_rwlock_t) == 4, alignment(_opaque_pthread_rwlock_t) +class _opaque_pthread_rwlockattr_t(Structure): + pass +_opaque_pthread_rwlockattr_t._fields_ = [ + ('__sig', c_long), + ('__opaque', c_char * 12), +] +assert sizeof(_opaque_pthread_rwlockattr_t) == 16, sizeof(_opaque_pthread_rwlockattr_t) +assert alignment(_opaque_pthread_rwlockattr_t) == 4, alignment(_opaque_pthread_rwlockattr_t) +class _opaque_pthread_t(Structure): + pass +_opaque_pthread_t._fields_ = [ + ('__sig', c_long), + ('__cleanup_stack', POINTER(__darwin_pthread_handler_rec)), + ('__opaque', c_char * 596), +] +assert sizeof(_opaque_pthread_t) == 604, sizeof(_opaque_pthread_t) +assert alignment(_opaque_pthread_t) == 4, alignment(_opaque_pthread_t) +__darwin_blkcnt_t = __int64_t +__darwin_blksize_t = __int32_t +__darwin_fsblkcnt_t = c_uint +__darwin_fsfilcnt_t = c_uint +__darwin_gid_t = __uint32_t +__darwin_id_t = __uint32_t +__darwin_ino_t = __uint32_t +__darwin_mach_port_name_t = __darwin_natural_t +__darwin_mach_port_t = __darwin_mach_port_name_t +__darwin_mcontext_t = POINTER(mcontext) +__darwin_mcontext64_t = POINTER(mcontext64) +__darwin_pid_t = __int32_t +__darwin_pthread_attr_t = _opaque_pthread_attr_t +__darwin_pthread_cond_t = _opaque_pthread_cond_t +__darwin_pthread_condattr_t = _opaque_pthread_condattr_t +__darwin_pthread_key_t = c_ulong +__darwin_pthread_mutex_t = _opaque_pthread_mutex_t +__darwin_pthread_mutexattr_t = _opaque_pthread_mutexattr_t +__darwin_pthread_once_t = _opaque_pthread_once_t +__darwin_pthread_rwlock_t = _opaque_pthread_rwlock_t +__darwin_pthread_rwlockattr_t = _opaque_pthread_rwlockattr_t +__darwin_pthread_t = POINTER(_opaque_pthread_t) +__darwin_sigset_t = __uint32_t +__darwin_suseconds_t = __int32_t +__darwin_uid_t = __uint32_t +__darwin_useconds_t = __uint32_t +__darwin_uuid_t = c_ubyte * 16 +class sigaltstack(Structure): + pass +sigaltstack._fields_ = [ + ('ss_sp', c_void_p), + ('ss_size', __darwin_size_t), + ('ss_flags', c_int), +] +assert sizeof(sigaltstack) == 12, sizeof(sigaltstack) +assert alignment(sigaltstack) == 4, alignment(sigaltstack) +__darwin_stack_t = sigaltstack +class ucontext(Structure): + pass +ucontext._fields_ = [ + ('uc_onstack', c_int), + ('uc_sigmask', __darwin_sigset_t), + ('uc_stack', __darwin_stack_t), + ('uc_link', POINTER(ucontext)), + ('uc_mcsize', __darwin_size_t), + ('uc_mcontext', __darwin_mcontext_t), +] +assert sizeof(ucontext) == 32, sizeof(ucontext) +assert alignment(ucontext) == 4, alignment(ucontext) +__darwin_ucontext_t = ucontext +class ucontext64(Structure): + pass +ucontext64._fields_ = [ + ('uc_onstack', c_int), + ('uc_sigmask', __darwin_sigset_t), + ('uc_stack', __darwin_stack_t), + ('uc_link', POINTER(ucontext64)), + ('uc_mcsize', __darwin_size_t), + ('uc_mcontext64', __darwin_mcontext64_t), +] +assert sizeof(ucontext64) == 32, sizeof(ucontext64) +assert alignment(ucontext64) == 4, alignment(ucontext64) +__darwin_ucontext64_t = ucontext64 +class timeval(Structure): + pass +timeval._fields_ = [ + ('tv_sec', __darwin_time_t), + ('tv_usec', __darwin_suseconds_t), +] +assert sizeof(timeval) == 8, sizeof(timeval) +assert alignment(timeval) == 4, alignment(timeval) +rlim_t = __int64_t +class rusage(Structure): + pass +rusage._fields_ = [ + ('ru_utime', timeval), + ('ru_stime', timeval), + ('ru_maxrss', c_long), + ('ru_ixrss', c_long), + ('ru_idrss', 
c_long), + ('ru_isrss', c_long), + ('ru_minflt', c_long), + ('ru_majflt', c_long), + ('ru_nswap', c_long), + ('ru_inblock', c_long), + ('ru_oublock', c_long), + ('ru_msgsnd', c_long), + ('ru_msgrcv', c_long), + ('ru_nsignals', c_long), + ('ru_nvcsw', c_long), + ('ru_nivcsw', c_long), +] +assert sizeof(rusage) == 72, sizeof(rusage) +assert alignment(rusage) == 4, alignment(rusage) +class rlimit(Structure): + pass +rlimit._pack_ = 4 +rlimit._fields_ = [ + ('rlim_cur', rlim_t), + ('rlim_max', rlim_t), +] +assert sizeof(rlimit) == 16, sizeof(rlimit) +assert alignment(rlimit) == 4, alignment(rlimit) +mcontext_t = __darwin_mcontext_t +mcontext64_t = __darwin_mcontext64_t +pthread_attr_t = __darwin_pthread_attr_t +sigset_t = __darwin_sigset_t +ucontext_t = __darwin_ucontext_t +ucontext64_t = __darwin_ucontext64_t +uid_t = __darwin_uid_t +class sigval(Union): + pass +sigval._fields_ = [ + ('sival_int', c_int), + ('sival_ptr', c_void_p), +] +assert sizeof(sigval) == 4, sizeof(sigval) +assert alignment(sigval) == 4, alignment(sigval) +class sigevent(Structure): + pass +sigevent._fields_ = [ + ('sigev_notify', c_int), + ('sigev_signo', c_int), + ('sigev_value', sigval), + ('sigev_notify_function', CFUNCTYPE(None, sigval)), + ('sigev_notify_attributes', POINTER(pthread_attr_t)), +] +assert sizeof(sigevent) == 20, sizeof(sigevent) +assert alignment(sigevent) == 4, alignment(sigevent) +class __siginfo(Structure): + pass +pid_t = __darwin_pid_t +__siginfo._fields_ = [ + ('si_signo', c_int), + ('si_errno', c_int), + ('si_code', c_int), + ('si_pid', pid_t), + ('si_uid', uid_t), + ('si_status', c_int), + ('si_addr', c_void_p), + ('si_value', sigval), + ('si_band', c_long), + ('pad', c_ulong * 7), +] +assert sizeof(__siginfo) == 64, sizeof(__siginfo) +assert alignment(__siginfo) == 4, alignment(__siginfo) +siginfo_t = __siginfo +class __sigaction_u(Union): + pass +__sigaction_u._fields_ = [ + ('__sa_handler', CFUNCTYPE(None, c_int)), + ('__sa_sigaction', CFUNCTYPE(None, c_int, POINTER(__siginfo), c_void_p)), +] +assert sizeof(__sigaction_u) == 4, sizeof(__sigaction_u) +assert alignment(__sigaction_u) == 4, alignment(__sigaction_u) +class __sigaction(Structure): + pass +__sigaction._fields_ = [ + ('__sigaction_u', __sigaction_u), + ('sa_tramp', CFUNCTYPE(None, c_void_p, c_int, c_int, POINTER(siginfo_t), c_void_p)), + ('sa_mask', sigset_t), + ('sa_flags', c_int), +] +assert sizeof(__sigaction) == 16, sizeof(__sigaction) +assert alignment(__sigaction) == 4, alignment(__sigaction) +class sigaction(Structure): + pass +sigaction._fields_ = [ + ('__sigaction_u', __sigaction_u), + ('sa_mask', sigset_t), + ('sa_flags', c_int), +] +assert sizeof(sigaction) == 12, sizeof(sigaction) +assert alignment(sigaction) == 4, alignment(sigaction) +sig_t = CFUNCTYPE(None, c_int) +stack_t = __darwin_stack_t +class sigvec(Structure): + pass +sigvec._fields_ = [ + ('sv_handler', CFUNCTYPE(None, c_int)), + ('sv_mask', c_int), + ('sv_flags', c_int), +] +assert sizeof(sigvec) == 12, sizeof(sigvec) +assert alignment(sigvec) == 4, alignment(sigvec) +class sigstack(Structure): + pass +sigstack._fields_ = [ + ('ss_sp', STRING), + ('ss_onstack', c_int), +] +assert sizeof(sigstack) == 8, sizeof(sigstack) +assert alignment(sigstack) == 4, alignment(sigstack) +u_char = c_ubyte +u_short = c_ushort +u_int = c_uint +u_long = c_ulong +ushort = c_ushort +uint = c_uint +u_quad_t = u_int64_t +quad_t = int64_t +qaddr_t = POINTER(quad_t) +caddr_t = STRING +daddr_t = int32_t +fixpt_t = u_int32_t +blkcnt_t = __darwin_blkcnt_t +blksize_t = 
__darwin_blksize_t +gid_t = __darwin_gid_t +in_addr_t = __uint32_t +in_port_t = __uint16_t +ino_t = __darwin_ino_t +key_t = __int32_t +nlink_t = __uint16_t +off_t = __darwin_off_t +segsz_t = int32_t +swblk_t = int32_t +clock_t = __darwin_clock_t +ssize_t = __darwin_ssize_t +useconds_t = __darwin_useconds_t +suseconds_t = __darwin_suseconds_t +fd_mask = __int32_t +class fd_set(Structure): + pass +fd_set._fields_ = [ + ('fds_bits', __int32_t * 32), +] +assert sizeof(fd_set) == 128, sizeof(fd_set) +assert alignment(fd_set) == 4, alignment(fd_set) +pthread_cond_t = __darwin_pthread_cond_t +pthread_condattr_t = __darwin_pthread_condattr_t +pthread_mutex_t = __darwin_pthread_mutex_t +pthread_mutexattr_t = __darwin_pthread_mutexattr_t +pthread_once_t = __darwin_pthread_once_t +pthread_rwlock_t = __darwin_pthread_rwlock_t +pthread_rwlockattr_t = __darwin_pthread_rwlockattr_t +pthread_t = __darwin_pthread_t +pthread_key_t = __darwin_pthread_key_t +fsblkcnt_t = __darwin_fsblkcnt_t +fsfilcnt_t = __darwin_fsfilcnt_t + +# values for enumeration 'idtype_t' +idtype_t = c_int # enum +id_t = __darwin_id_t +class wait(Union): + pass +class N4wait3DOLLAR_3E(Structure): + pass +N4wait3DOLLAR_3E._fields_ = [ + ('w_Termsig', c_uint, 7), + ('w_Coredump', c_uint, 1), + ('w_Retcode', c_uint, 8), + ('w_Filler', c_uint, 16), +] +assert sizeof(N4wait3DOLLAR_3E) == 4, sizeof(N4wait3DOLLAR_3E) +assert alignment(N4wait3DOLLAR_3E) == 4, alignment(N4wait3DOLLAR_3E) +class N4wait3DOLLAR_4E(Structure): + pass +N4wait3DOLLAR_4E._fields_ = [ + ('w_Stopval', c_uint, 8), + ('w_Stopsig', c_uint, 8), + ('w_Filler', c_uint, 16), +] +assert sizeof(N4wait3DOLLAR_4E) == 4, sizeof(N4wait3DOLLAR_4E) +assert alignment(N4wait3DOLLAR_4E) == 4, alignment(N4wait3DOLLAR_4E) +wait._fields_ = [ + ('w_status', c_int), + ('w_T', N4wait3DOLLAR_3E), + ('w_S', N4wait3DOLLAR_4E), +] +assert sizeof(wait) == 4, sizeof(wait) +assert alignment(wait) == 4, alignment(wait) +class timespec(Structure): + pass +timespec._fields_ = [ + ('tv_sec', time_t), + ('tv_nsec', c_long), +] +assert sizeof(timespec) == 8, sizeof(timespec) +assert alignment(timespec) == 4, alignment(timespec) +class tm(Structure): + pass +tm._fields_ = [ + ('tm_sec', c_int), + ('tm_min', c_int), + ('tm_hour', c_int), + ('tm_mday', c_int), + ('tm_mon', c_int), + ('tm_year', c_int), + ('tm_wday', c_int), + ('tm_yday', c_int), + ('tm_isdst', c_int), + ('tm_gmtoff', c_long), + ('tm_zone', STRING), +] +assert sizeof(tm) == 44, sizeof(tm) +assert alignment(tm) == 4, alignment(tm) +__gnuc_va_list = STRING +ptrdiff_t = c_int +int8_t = c_byte +int16_t = c_short +uint8_t = c_ubyte +uint16_t = c_ushort +uint32_t = c_uint +uint64_t = c_ulonglong +int_least8_t = int8_t +int_least16_t = int16_t +int_least32_t = int32_t +int_least64_t = int64_t +uint_least8_t = uint8_t +uint_least16_t = uint16_t +uint_least32_t = uint32_t +uint_least64_t = uint64_t +int_fast8_t = int8_t +int_fast16_t = int16_t +int_fast32_t = int32_t +int_fast64_t = int64_t +uint_fast8_t = uint8_t +uint_fast16_t = uint16_t +uint_fast32_t = uint32_t +uint_fast64_t = uint64_t +intptr_t = c_long +uintptr_t = c_ulong +intmax_t = c_longlong +uintmax_t = c_ulonglong +__all__ = ['ENGINE', 'pkcs7_enc_content_st', '__int16_t', + 'X509_REVOKED', 'SSL_CTX', 'UIT_BOOLEAN', + '__darwin_time_t', 'ucontext64_t', 'int_fast32_t', + 'pem_ctx_st', 'uint8_t', 'fpos_t', 'X509', 'COMP_CTX', + 'tm', 'N10pem_ctx_st4DOLLAR_17E', 'swblk_t', + 'ASN1_TEMPLATE', '__darwin_pthread_t', 'fixpt_t', + 'BIO_METHOD', 'ASN1_PRINTABLESTRING', 'EVP_ENCODE_CTX', + 
'dh_method', 'bio_f_buffer_ctx_struct', 'in_port_t', + 'X509_SIG', '__darwin_ssize_t', '__darwin_sigset_t', + 'wait', 'uint_fast16_t', 'N12asn1_type_st4DOLLAR_11E', + 'uint_least8_t', 'pthread_rwlock_t', 'ASN1_IA5STRING', + 'fsfilcnt_t', 'ucontext', '__uint64_t', 'timespec', + 'x509_cinf_st', 'COMP_METHOD', 'MD5_CTX', 'buf_mem_st', + 'ASN1_ENCODING_st', 'PBEPARAM', 'X509_NAME_ENTRY', + '__darwin_va_list', 'ucontext_t', 'lhash_st', + 'N4wait3DOLLAR_4E', '__darwin_uuid_t', + '_ossl_old_des_ks_struct', 'id_t', 'ASN1_BIT_STRING', + 'va_list', '__darwin_wchar_t', 'pthread_key_t', + 'pkcs7_signer_info_st', 'ASN1_METHOD', 'DSA_SIG', 'DSA', + 'UIT_NONE', 'pthread_t', '__darwin_useconds_t', + 'uint_fast8_t', 'UI_STRING', 'DES_cblock', + '__darwin_mcontext64_t', 'rlim_t', 'PEM_Encode_Seal_st', + 'SHAstate_st', 'u_quad_t', 'openssl_fptr', + '_opaque_pthread_rwlockattr_t', + 'N18x509_attributes_st4DOLLAR_13E', + '__darwin_pthread_rwlock_t', 'daddr_t', 'ui_string_st', + 'x509_file_st', 'X509_req_info_st', 'int_least64_t', + 'evp_Encode_Ctx_st', 'X509_OBJECTS', 'CRYPTO_EX_DATA', + '__int8_t', 'AUTHORITY_KEYID_st', '_opaque_pthread_attr_t', + 'sigstack', 'EVP_CIPHER_CTX', 'X509_extension_st', 'pid_t', + 'RSA_METHOD', 'PEM_USER', 'pem_recip_st', 'env_md_ctx_st', + 'rc5_key_st', 'ui_st', 'X509_PUBKEY', 'u_int8_t', + 'ASN1_ITEM_st', 'pkcs7_recip_info_st', 'ssl2_state_st', + 'off_t', 'N10ssl_ctx_st4DOLLAR_18E', 'crypto_ex_data_st', + 'ui_method_st', '__darwin_pthread_rwlockattr_t', + 'CRYPTO_EX_dup', '__darwin_ino_t', '__sFILE', + 'OSUnknownByteOrder', 'BN_MONT_CTX', 'ASN1_NULL', 'time_t', + 'CRYPTO_EX_new', 'asn1_type_st', 'CRYPTO_EX_DATA_FUNCS', + 'user_time_t', 'BIGNUM', 'pthread_rwlockattr_t', + 'ASN1_VALUE_st', 'DH_METHOD', '__darwin_off_t', + '_opaque_pthread_t', 'bn_blinding_st', 'RSA', 'ssize_t', + 'mcontext64_t', 'user_long_t', 'fsblkcnt_t', 'cert_st', + '__darwin_pthread_condattr_t', 'X509_PKEY', + '__darwin_id_t', '__darwin_nl_item', 'SSL2_STATE', 'FILE', + 'pthread_mutexattr_t', 'size_t', + '_ossl_old_des_key_schedule', 'pkcs7_issuer_and_serial_st', + 'sigval', 'CRYPTO_MEM_LEAK_CB', 'X509_NAME', 'blkcnt_t', + 'uint_least16_t', '__darwin_dev_t', 'evp_cipher_info_st', + 'BN_BLINDING', 'ssl3_state_st', 'uint_least64_t', + 'user_addr_t', 'DES_key_schedule', 'RIPEMD160_CTX', + 'u_char', 'X509_algor_st', 'uid_t', 'sess_cert_st', + 'u_int64_t', 'u_int16_t', 'sigset_t', '__darwin_ptrdiff_t', + 'ASN1_CTX', 'STACK', '__int32_t', 'UI_METHOD', + 'NETSCAPE_SPKI', 'UIT_PROMPT', 'st_CRYPTO_EX_DATA_IMPL', + 'cast_key_st', 'X509_HASH_DIR_CTX', 'sigevent', + 'user_ssize_t', 'clock_t', 'aes_key_st', + '__darwin_socklen_t', '__darwin_intptr_t', 'int_fast64_t', + 'asn1_string_table_st', 'uint_fast32_t', + 'ASN1_VISIBLESTRING', 'DSA_SIG_st', 'obj_name_st', + 'X509_LOOKUP_METHOD', 'u_int32_t', 'EVP_CIPHER_INFO', + '__gnuc_va_list', 'AES_KEY', 'PKCS7_ISSUER_AND_SERIAL', + 'BN_CTX', '__darwin_blkcnt_t', 'key_t', 'SHA_CTX', + 'pkcs7_signed_st', 'SSL', 'N10pem_ctx_st4DOLLAR_16E', + 'pthread_attr_t', 'EVP_MD', 'uint', 'ASN1_BOOLEAN', + 'ino_t', '__darwin_clock_t', 'ASN1_OCTET_STRING', + 'asn1_ctx_st', 'BIO_F_BUFFER_CTX', 'bn_mont_ctx_st', + 'X509_REQ_INFO', 'PEM_CTX', 'sigvec', + '__darwin_pthread_mutexattr_t', 'x509_attributes_st', + 'stack_t', '__darwin_mode_t', '__mbstate_t', + 'asn1_object_st', 'ASN1_ENCODING', '__uint8_t', + 'LHASH_NODE', 'PKCS7_SIGNER_INFO', 'asn1_method_st', + 'stack_st', 'bio_info_cb', 'div_t', 'UIT_VERIFY', + 'PBEPARAM_st', 'N4wait3DOLLAR_3E', 'quad_t', '__siginfo', + '__darwin_mbstate_t', 
'rsa_st', 'ASN1_UNIVERSALSTRING', + 'uint64_t', 'ssl_comp_st', 'X509_OBJECT', 'pthread_cond_t', + 'DH', '__darwin_wctype_t', 'PKCS7_ENVELOPE', 'ASN1_TLC_st', + 'sig_atomic_t', 'BIO', 'nlink_t', 'BUF_MEM', 'SSL3_RECORD', + 'bio_method_st', 'timeval', 'UI_string_types', 'BIO_dummy', + 'ssl_ctx_st', 'NETSCAPE_CERT_SEQUENCE', + 'BIT_STRING_BITNAME_st', '__darwin_pthread_attr_t', + 'int8_t', '__darwin_wint_t', 'OBJ_NAME', + 'PKCS8_PRIV_KEY_INFO', 'PBE2PARAM_st', + 'LHASH_DOALL_FN_TYPE', 'x509_st', 'X509_VAL', 'dev_t', + 'ASN1_TEMPLATE_st', 'MD5state_st', '__uint16_t', + 'LHASH_DOALL_ARG_FN_TYPE', 'mdc2_ctx_st', 'SSL3_STATE', + 'ssl3_buffer_st', 'ASN1_ITEM_EXP', + '_opaque_pthread_condattr_t', 'mode_t', 'ASN1_VALUE', + 'qaddr_t', '__darwin_gid_t', 'EVP_PKEY', 'CRYPTO_EX_free', + '_ossl_old_des_cblock', 'X509_INFO', 'asn1_string_st', + 'intptr_t', 'UIT_INFO', 'int_fast8_t', 'sigaltstack', + 'env_md_st', 'LHASH', '__darwin_ucontext_t', + 'PKCS7_SIGN_ENVELOPE', '__darwin_mcontext_t', 'ct_rune_t', + 'MD2_CTX', 'pthread_once_t', 'SSL3_BUFFER', 'fd_mask', + 'ASN1_TYPE', 'PKCS7_SIGNED', 'ssl3_record_st', 'BF_KEY', + 'MD4state_st', 'MD4_CTX', 'int16_t', 'SSL_CIPHER', + 'rune_t', 'X509_TRUST', 'siginfo_t', 'X509_STORE', + '__sbuf', 'X509_STORE_CTX', '__darwin_blksize_t', 'ldiv_t', + 'ASN1_TIME', 'SSL_METHOD', 'X509_LOOKUP', + 'Netscape_spki_st', 'P_PID', 'sigaction', 'sig_t', + 'hostent', 'x509_cert_aux_st', '_opaque_pthread_cond_t', + 'segsz_t', 'ushort', '__darwin_ct_rune_t', 'fd_set', + 'BN_RECP_CTX', 'x509_lookup_st', 'uint16_t', 'pkcs7_st', + 'asn1_header_st', '__darwin_pthread_key_t', + 'x509_trust_st', '__darwin_pthread_handler_rec', 'int32_t', + 'X509_CRL_INFO', 'N11evp_pkey_st4DOLLAR_12E', 'MDC2_CTX', + 'N23_ossl_old_des_ks_struct4DOLLAR_10E', 'ASN1_HEADER', + 'X509_crl_info_st', 'LHASH_HASH_FN_TYPE', + '_opaque_pthread_mutexattr_t', 'ssl_st', + 'N8pkcs7_st4DOLLAR_15E', 'evp_pkey_st', + 'pkcs7_signedandenveloped_st', '__darwin_mach_port_t', + 'EVP_PBE_KEYGEN', '_opaque_pthread_mutex_t', + 'ASN1_UTCTIME', 'mcontext', 'crypto_ex_data_func_st', + 'u_long', 'PBKDF2PARAM_st', 'rc4_key_st', 'DSA_METHOD', + 'EVP_CIPHER', 'BIT_STRING_BITNAME', 'PKCS7_RECIP_INFO', + 'ssl3_enc_method', 'X509_CERT_AUX', 'uintmax_t', + 'int_fast16_t', 'RC5_32_KEY', 'ucontext64', 'ASN1_INTEGER', + 'u_short', 'N14x509_object_st4DOLLAR_14E', 'mcontext64', + 'X509_sig_st', 'ASN1_GENERALSTRING', 'PKCS7', '__sFILEX', + 'X509_name_entry_st', 'ssl_session_st', 'caddr_t', + 'bignum_st', 'X509_CINF', '__darwin_pthread_cond_t', + 'ASN1_TLC', 'PKCS7_ENCRYPT', 'NETSCAPE_SPKAC', + 'Netscape_spkac_st', 'idtype_t', 'UIT_ERROR', + 'uint_fast64_t', 'in_addr_t', 'pthread_mutex_t', + '__int64_t', 'ASN1_BMPSTRING', 'uint32_t', + 'PEM_ENCODE_SEAL_CTX', 'suseconds_t', 'ASN1_OBJECT', + 'X509_val_st', 'private_key_st', 'CRYPTO_dynlock', + 'X509_objects_st', 'CRYPTO_EX_DATA_IMPL', + 'pthread_condattr_t', 'PKCS7_DIGEST', 'uint_least32_t', + 'ASN1_STRING', '__uint32_t', 'P_PGID', 'rsa_meth_st', + 'X509_crl_st', 'RC2_KEY', '__darwin_fsfilcnt_t', + 'X509_revoked_st', 'PBE2PARAM', 'blksize_t', + 'Netscape_certificate_sequence', 'ssl_cipher_st', + 'bignum_ctx', 'register_t', 'ASN1_UTF8STRING', + 'pkcs7_encrypted_st', 'RC4_KEY', '__darwin_ucontext64_t', + 'N13ssl2_state_st4DOLLAR_19E', 'bn_recp_ctx_st', + 'CAST_KEY', 'X509_ATTRIBUTE', '__darwin_suseconds_t', + '__sigaction', 'user_ulong_t', 'syscall_arg_t', + 'evp_cipher_ctx_st', 'X509_ALGOR', 'mcontext_t', + 'const_DES_cblock', '__darwin_fsblkcnt_t', 'dsa_st', + 'int_least8_t', 'MD2state_st', 
'X509_EXTENSION', + 'GEN_SESSION_CB', 'int_least16_t', '__darwin_wctrans_t', + 'PBKDF2PARAM', 'x509_lookup_method_st', 'pem_password_cb', + 'X509_info_st', 'x509_store_st', '__darwin_natural_t', + 'X509_pubkey_st', 'pkcs7_digest_st', '__darwin_size_t', + 'ASN1_STRING_TABLE', 'OSLittleEndian', 'RIPEMD160state_st', + 'pkcs7_enveloped_st', 'UI', 'ptrdiff_t', 'X509_REQ', + 'CRYPTO_dynlock_value', 'X509_req_st', 'x509_store_ctx_st', + 'N13ssl3_state_st4DOLLAR_20E', 'lhash_node_st', + '__darwin_pthread_mutex_t', 'LHASH_COMP_FN_TYPE', + '__darwin_rune_t', 'rlimit', '__darwin_pthread_once_t', + 'OSBigEndian', 'uintptr_t', '__darwin_uid_t', 'u_int', + 'ASN1_T61STRING', 'gid_t', 'ssl_method_st', 'ASN1_ITEM', + 'ASN1_ENUMERATED', '_opaque_pthread_rwlock_t', + 'pkcs8_priv_key_info_st', 'intmax_t', 'sigcontext', + 'X509_CRL', 'rc2_key_st', 'engine_st', 'x509_object_st', + '_opaque_pthread_once_t', 'DES_ks', 'SSL_COMP', + 'dsa_method', 'int64_t', 'bio_st', 'bf_key_st', + 'ASN1_GENERALIZEDTIME', 'PKCS7_ENC_CONTENT', + '__darwin_pid_t', 'lldiv_t', 'comp_method_st', + 'EVP_MD_CTX', 'evp_cipher_st', 'X509_name_st', + 'x509_hash_dir_st', '__darwin_mach_port_name_t', + 'useconds_t', 'user_size_t', 'SSL_SESSION', 'rusage', + 'ssl_crock_st', 'int_least32_t', '__sigaction_u', 'dh_st', + 'P_ALL', '__darwin_stack_t', 'N6DES_ks3DOLLAR_9E', + 'comp_ctx_st', 'X509_CERT_FILE_CTX'] diff --git a/lib3/2to3/lib2to3/tests/data/py2_test_grammar.py b/lib3/2to3/lib2to3/tests/data/py2_test_grammar.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/py2_test_grammar.py @@ -0,0 +1,974 @@ +# Python test set -- part 1, grammar. +# This just tests whether the parser accepts them all. + +# NOTE: When you run this test as a script from the command line, you +# get warnings about certain hex/oct constants. Since those are +# issued by the parser, you can't suppress them by adding a +# filterwarnings() call to this module. Therefore, to shut up the +# regression test, the filterwarnings() call has been added to +# regrtest.py. 
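The header comment of the test-data file below refers to a warning filter installed by the test driver so that the parser's hex/oct-constant warnings do not pollute the regression-test output. A hedged sketch of what such a filter looks like; the message pattern and category here are purely illustrative, and the real call lives in regrtest.py and may differ:

import warnings

# Silence parse-time complaints about old-style hex/oct constants; the
# message/category would need to match whatever the interpreter actually emits.
warnings.filterwarnings("ignore",
                        message=".*hex/oct constants.*",
                        category=DeprecationWarning)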
+ +from test.test_support import run_unittest, check_syntax_error +import unittest +import sys +# testing import * +from sys import * + +class TokenTests(unittest.TestCase): + + def testBackslash(self): + # Backslash means line continuation: + x = 1 \ + + 1 + self.assertEquals(x, 2, 'backslash for line continuation') + + # Backslash does not means continuation in comments :\ + x = 0 + self.assertEquals(x, 0, 'backslash ending comment') + + def testPlainIntegers(self): + self.assertEquals(0xff, 255) + self.assertEquals(0o377, 255) + self.assertEquals(2147483647, 0o17777777777) + # "0x" is not a valid literal + self.assertRaises(SyntaxError, eval, "0x") + from sys import maxsize + if maxint == 2147483647: + self.assertEquals(-2147483647-1, -0o20000000000) + # XXX -2147483648 + self.assert_(0o37777777777 > 0) + self.assert_(0xffffffff > 0) + for s in '2147483648', '040000000000', '0x100000000': + try: + x = eval(s) + except OverflowError: + self.fail("OverflowError on huge integer literal %r" % s) + elif maxint == 9223372036854775807: + self.assertEquals(-9223372036854775807-1, -0o1000000000000000000000) + self.assert_(0o1777777777777777777777 > 0) + self.assert_(0xffffffffffffffff > 0) + for s in '9223372036854775808', '02000000000000000000000', \ + '0x10000000000000000': + try: + x = eval(s) + except OverflowError: + self.fail("OverflowError on huge integer literal %r" % s) + else: + self.fail('Weird maxint value %r' % maxint) + + def testLongIntegers(self): + x = 0 + x = 0 + x = 0xffffffffffffffff + x = 0xffffffffffffffff + x = 077777777777777777 + x = 077777777777777777 + x = 123456789012345678901234567890 + x = 123456789012345678901234567890 + + def testFloats(self): + x = 3.14 + x = 314. + x = 0.314 + # XXX x = 000.314 + x = .314 + x = 3e14 + x = 3E14 + x = 3e-14 + x = 3e+14 + x = 3.e14 + x = .3e14 + x = 3.1e4 + + def testStringLiterals(self): + x = ''; y = ""; self.assert_(len(x) == 0 and x == y) + x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) + x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) + x = "doesn't \"shrink\" does it" + y = 'doesn\'t "shrink" does it' + self.assert_(len(x) == 24 and x == y) + x = "does \"shrink\" doesn't it" + y = 'does "shrink" doesn\'t it' + self.assert_(len(x) == 24 and x == y) + x = """ +The "quick" +brown fox +jumps over +the 'lazy' dog. +""" + y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' + self.assertEquals(x, y) + y = ''' +The "quick" +brown fox +jumps over +the 'lazy' dog. 
+''' + self.assertEquals(x, y) + y = "\n\ +The \"quick\"\n\ +brown fox\n\ +jumps over\n\ +the 'lazy' dog.\n\ +" + self.assertEquals(x, y) + y = '\n\ +The \"quick\"\n\ +brown fox\n\ +jumps over\n\ +the \'lazy\' dog.\n\ +' + self.assertEquals(x, y) + + +class GrammarTests(unittest.TestCase): + + # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE + # XXX can't test in a script -- this rule is only used when interactive + + # file_input: (NEWLINE | stmt)* ENDMARKER + # Being tested as this very moment this very module + + # expr_input: testlist NEWLINE + # XXX Hard to test -- used only in calls to input() + + def testEvalInput(self): + # testlist ENDMARKER + x = eval('1, 0 or 1') + + def testFuncdef(self): + ### 'def' NAME parameters ':' suite + ### parameters: '(' [varargslist] ')' + ### varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' ('**'|'*' '*') NAME] + ### | ('**'|'*' '*') NAME) + ### | fpdef ['=' test] (',' fpdef ['=' test])* [','] + ### fpdef: NAME | '(' fplist ')' + ### fplist: fpdef (',' fpdef)* [','] + ### arglist: (argument ',')* (argument | *' test [',' '**' test] | '**' test) + ### argument: [test '='] test # Really [keyword '='] test + def f1(): pass + f1() + f1(*()) + f1(*(), **{}) + def f2(one_argument): pass + def f3(two, arguments): pass + def f4(two, xxx_todo_changeme): (compound, (argument, list)) = xxx_todo_changeme; pass + def f5(xxx_todo_changeme1, two): (compound, first) = xxx_todo_changeme1; pass + self.assertEquals(f2.__code__.co_varnames, ('one_argument',)) + self.assertEquals(f3.__code__.co_varnames, ('two', 'arguments')) + if sys.platform.startswith('java'): + self.assertEquals(f4.__code__.co_varnames, + ('two', '(compound, (argument, list))', 'compound', 'argument', + 'list',)) + self.assertEquals(f5.__code__.co_varnames, + ('(compound, first)', 'two', 'compound', 'first')) + else: + self.assertEquals(f4.__code__.co_varnames, + ('two', '.1', 'compound', 'argument', 'list')) + self.assertEquals(f5.__code__.co_varnames, + ('.0', 'two', 'compound', 'first')) + def a1(one_arg,): pass + def a2(two, args,): pass + def v0(*rest): pass + def v1(a, *rest): pass + def v2(a, b, *rest): pass + def v3(a, xxx_todo_changeme2, *rest): (b, c) = xxx_todo_changeme2; return a, b, c, rest + + f1() + f2(1) + f2(1,) + f3(1, 2) + f3(1, 2,) + f4(1, (2, (3, 4))) + v0() + v0(1) + v0(1,) + v0(1,2) + v0(1,2,3,4,5,6,7,8,9,0) + v1(1) + v1(1,) + v1(1,2) + v1(1,2,3) + v1(1,2,3,4,5,6,7,8,9,0) + v2(1,2) + v2(1,2,3) + v2(1,2,3,4) + v2(1,2,3,4,5,6,7,8,9,0) + v3(1,(2,3)) + v3(1,(2,3),4) + v3(1,(2,3),4,5,6,7,8,9,0) + + # ceval unpacks the formal arguments into the first argcount names; + # thus, the names nested inside tuples must appear after these names. 
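The comment just above is the key to the co_varnames assertions that follow: the positional parameters occupy the first co_argcount slots of a code object's co_varnames, with every other local (including the synthetic names created for Python 2's tuple parameters) coming afterwards. A small Python 3 sketch of the same layout, with illustrative names only:

def sample(a, b, *rest):
    # 'total' is an ordinary local, so it must appear after the declared
    # parameters in the code object's variable-name table.
    total = a + b
    return total, rest

assert sample.__code__.co_argcount == 2
assert sample.__code__.co_varnames[:2] == ('a', 'b')
assert 'total' in sample.__code__.co_varnames[2:]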
+ if sys.platform.startswith('java'): + self.assertEquals(v3.__code__.co_varnames, ('a', '(b, c)', 'rest', 'b', 'c')) + else: + self.assertEquals(v3.__code__.co_varnames, ('a', '.1', 'rest', 'b', 'c')) + self.assertEquals(v3(1, (2, 3), 4), (1, 2, 3, (4,))) + def d01(a=1): pass + d01() + d01(1) + d01(*(1,)) + d01(**{'a':2}) + def d11(a, b=1): pass + d11(1) + d11(1, 2) + d11(1, **{'b':2}) + def d21(a, b, c=1): pass + d21(1, 2) + d21(1, 2, 3) + d21(*(1, 2, 3)) + d21(1, *(2, 3)) + d21(1, 2, *(3,)) + d21(1, 2, **{'c':3}) + def d02(a=1, b=2): pass + d02() + d02(1) + d02(1, 2) + d02(*(1, 2)) + d02(1, *(2,)) + d02(1, **{'b':2}) + d02(**{'a': 1, 'b': 2}) + def d12(a, b=1, c=2): pass + d12(1) + d12(1, 2) + d12(1, 2, 3) + def d22(a, b, c=1, d=2): pass + d22(1, 2) + d22(1, 2, 3) + d22(1, 2, 3, 4) + def d01v(a=1, *rest): pass + d01v() + d01v(1) + d01v(1, 2) + d01v(*(1, 2, 3, 4)) + d01v(*(1,)) + d01v(**{'a':2}) + def d11v(a, b=1, *rest): pass + d11v(1) + d11v(1, 2) + d11v(1, 2, 3) + def d21v(a, b, c=1, *rest): pass + d21v(1, 2) + d21v(1, 2, 3) + d21v(1, 2, 3, 4) + d21v(*(1, 2, 3, 4)) + d21v(1, 2, **{'c': 3}) + def d02v(a=1, b=2, *rest): pass + d02v() + d02v(1) + d02v(1, 2) + d02v(1, 2, 3) + d02v(1, *(2, 3, 4)) + d02v(**{'a': 1, 'b': 2}) + def d12v(a, b=1, c=2, *rest): pass + d12v(1) + d12v(1, 2) + d12v(1, 2, 3) + d12v(1, 2, 3, 4) + d12v(*(1, 2, 3, 4)) + d12v(1, 2, *(3, 4, 5)) + d12v(1, *(2,), **{'c': 3}) + def d22v(a, b, c=1, d=2, *rest): pass + d22v(1, 2) + d22v(1, 2, 3) + d22v(1, 2, 3, 4) + d22v(1, 2, 3, 4, 5) + d22v(*(1, 2, 3, 4)) + d22v(1, 2, *(3, 4, 5)) + d22v(1, *(2, 3), **{'d': 4}) + def d31v(xxx_todo_changeme3): (x) = xxx_todo_changeme3; pass + d31v(1) + def d32v(xxx_todo_changeme4): (x,) = xxx_todo_changeme4; pass + d32v((1,)) + + # keyword arguments after *arglist + def f(*args, **kwargs): + return args, kwargs + self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4), + {'x':2, 'y':5})) + self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)") + self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)") + + # Check ast errors in *args and *kwargs + check_syntax_error(self, "f(*g(1=2))") + check_syntax_error(self, "f(**g(1=2))") + + def testLambdef(self): + ### lambdef: 'lambda' [varargslist] ':' test + l1 = lambda : 0 + self.assertEquals(l1(), 0) + l2 = lambda : a[d] # XXX just testing the expression + l3 = lambda : [2 < x for x in [-1, 3, 0]] + self.assertEquals(l3(), [0, 1, 0]) + l4 = lambda x = lambda y = lambda z=1 : z : y() : x() + self.assertEquals(l4(), 1) + l5 = lambda x, y, z=2: x + y + z + self.assertEquals(l5(1, 2), 5) + self.assertEquals(l5(1, 2, 3), 6) + check_syntax_error(self, "lambda x: x = 2") + check_syntax_error(self, "lambda (None,): None") + + ### stmt: simple_stmt | compound_stmt + # Tested below + + def testSimpleStmt(self): + ### simple_stmt: small_stmt (';' small_stmt)* [';'] + x = 1; pass; del x + def foo(): + # verify statments that end with semi-colons + x = 1; pass; del x; + foo() + + ### small_stmt: expr_stmt | print_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt | exec_stmt + # Tested below + + def testExprStmt(self): + # (exprlist '=')* exprlist + 1 + 1, 2, 3 + x = 1 + x = 1, 2, 3 + x = y = z = 1, 2, 3 + x, y, z = 1, 2, 3 + abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) + + check_syntax_error(self, "x + 1 = 1") + check_syntax_error(self, "a + 1 = b + 2") + + def testPrintStmt(self): + # 'print' (test ',')* [test] + import io + + # Can't test printing to real stdout without comparing output + # which is not available in 
unittest. + save_stdout = sys.stdout + sys.stdout = io.StringIO() + + print(1, 2, 3) + print(1, 2, 3, end=' ') + print() + print(0 or 1, 0 or 1, end=' ') + print(0 or 1) + + # 'print' '>>' test ',' + print(1, 2, 3, file=sys.stdout) + print(1, 2, 3, end=' ', file=sys.stdout) + print(file=sys.stdout) + print(0 or 1, 0 or 1, end=' ', file=sys.stdout) + print(0 or 1, file=sys.stdout) + + # test printing to an instance + class Gulp: + def write(self, msg): pass + + gulp = Gulp() + print(1, 2, 3, file=gulp) + print(1, 2, 3, end=' ', file=gulp) + print(file=gulp) + print(0 or 1, 0 or 1, end=' ', file=gulp) + print(0 or 1, file=gulp) + + # test print >> None + def driver(): + oldstdout = sys.stdout + sys.stdout = Gulp() + try: + tellme(Gulp()) + tellme() + finally: + sys.stdout = oldstdout + + # we should see this once + def tellme(file=sys.stdout): + print('hello world', file=file) + + driver() + + # we should not see this at all + def tellme(file=None): + print('goodbye universe', file=file) + + driver() + + self.assertEqual(sys.stdout.getvalue(), '''\ +1 2 3 +1 2 3 +1 1 1 +1 2 3 +1 2 3 +1 1 1 +hello world +''') + sys.stdout = save_stdout + + # syntax errors + check_syntax_error(self, 'print ,') + check_syntax_error(self, 'print >> x,') + + def testDelStmt(self): + # 'del' exprlist + abc = [1,2,3] + x, y, z = abc + xyz = x, y, z + + del abc + del x, y, (z, xyz) + + def testPassStmt(self): + # 'pass' + pass + + # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt + # Tested below + + def testBreakStmt(self): + # 'break' + while 1: break + + def testContinueStmt(self): + # 'continue' + i = 1 + while i: i = 0; continue + + msg = "" + while not msg: + msg = "ok" + try: + continue + msg = "continue failed to continue inside try" + except: + msg = "continue inside try called except block" + if msg != "ok": + self.fail(msg) + + msg = "" + while not msg: + msg = "finally block not called" + try: + continue + finally: + msg = "ok" + if msg != "ok": + self.fail(msg) + + def test_break_continue_loop(self): + # This test warrants an explanation. It is a test specifically for SF bugs + # #463359 and #462937. The bug is that a 'break' statement executed or + # exception raised inside a try/except inside a loop, *after* a continue + # statement has been executed in that loop, will cause the wrong number of + # arguments to be popped off the stack and the instruction pointer reset to + # a very small number (usually 0.) Because of this, the following test + # *must* written as a function, and the tracking vars *must* be function + # arguments with default values. Otherwise, the test will loop and loop. 
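Distilled to its essentials, the situation that test_break_continue_loop guards against looks like the hedged sketch below: a loop whose early iterations hit continue inside a try block and whose final iteration hits break in the same block. On an interpreter with the SF #463359/#462937 regression this kind of loop misbehaved; on a fixed one it simply terminates.

def break_after_continue(n=3, passes=0):
    # The tracking vars are function arguments with defaults, mirroring the
    # comment above about why the original test is written that way.
    while n:
        passes += 1
        try:
            if n == 1:
                break        # final pass: 'break' inside the try block
            n -= 1
            continue         # earlier passes: 'continue' inside the try block
        except Exception:
            raise
    return passes

assert break_after_continue() == 3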
+ + def test_inner(extra_burning_oil = 1, count=0): + big_hippo = 2 + while big_hippo: + count += 1 + try: + if extra_burning_oil and big_hippo == 1: + extra_burning_oil -= 1 + break + big_hippo -= 1 + continue + except: + raise + if count > 2 or big_hippo != 1: + self.fail("continue then break in try/except in loop broken!") + test_inner() + + def testReturn(self): + # 'return' [testlist] + def g1(): return + def g2(): return 1 + g1() + x = g2() + check_syntax_error(self, "class foo:return 1") + + def testYield(self): + check_syntax_error(self, "class foo:yield 1") + + def testRaise(self): + # 'raise' test [',' test] + try: raise RuntimeError('just testing') + except RuntimeError: pass + try: raise KeyboardInterrupt + except KeyboardInterrupt: pass + + def testImport(self): + # 'import' dotted_as_names + import sys + import time, sys + # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) + from time import time + from time import (time) + # not testable inside a function, but already done at top of the module + # from sys import * + from sys import path, argv + from sys import (path, argv) + from sys import (path, argv,) + + def testGlobal(self): + # 'global' NAME (',' NAME)* + global a + global a, b + global one, two, three, four, five, six, seven, eight, nine, ten + + def testExec(self): + # 'exec' expr ['in' expr [',' expr]] + z = None + del z + exec('z=1+1\n') + if z != 2: self.fail('exec \'z=1+1\'\\n') + del z + exec('z=1+1') + if z != 2: self.fail('exec \'z=1+1\'') + z = None + del z + import types + if hasattr(types, "UnicodeType"): + exec(r"""if 1: + exec u'z=1+1\n' + if z != 2: self.fail('exec u\'z=1+1\'\\n') + del z + exec u'z=1+1' + if z != 2: self.fail('exec u\'z=1+1\'')""") + g = {} + exec('z = 1', g) + if '__builtins__' in g: del g['__builtins__'] + if g != {'z': 1}: self.fail('exec \'z = 1\' in g') + g = {} + l = {} + + import warnings + warnings.filterwarnings("ignore", "global statement", module="") + exec('global a; a = 1; b = 2', g, l) + if '__builtins__' in g: del g['__builtins__'] + if '__builtins__' in l: del l['__builtins__'] + if (g, l) != ({'a':1}, {'b':2}): + self.fail('exec ... in g (%s), l (%s)' %(g,l)) + + def testAssert(self): + # assert_stmt: 'assert' test [',' test] + assert 1 + assert 1, 1 + assert lambda x:x + assert 1, lambda x:x+1 + try: + assert 0, "msg" + except AssertionError as e: + self.assertEquals(e.args[0], "msg") + else: + if __debug__: + self.fail("AssertionError not raised by assert 0") + + ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef + # Tested below + + def testIf(self): + # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] + if 1: pass + if 1: pass + else: pass + if 0: pass + elif 0: pass + if 0: pass + elif 0: pass + elif 0: pass + elif 0: pass + else: pass + + def testWhile(self): + # 'while' test ':' suite ['else' ':' suite] + while 0: pass + while 0: pass + else: pass + + # Issue1920: "while 0" is optimized away, + # ensure that the "else" clause is still present. 
+ x = 0 + while 0: + x = 1 + else: + x = 2 + self.assertEquals(x, 2) + + def testFor(self): + # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite] + for i in 1, 2, 3: pass + for i, j, k in (): pass + else: pass + class Squares: + def __init__(self, max): + self.max = max + self.sofar = [] + def __len__(self): return len(self.sofar) + def __getitem__(self, i): + if not 0 <= i < self.max: raise IndexError + n = len(self.sofar) + while n <= i: + self.sofar.append(n*n) + n = n+1 + return self.sofar[i] + n = 0 + for x in Squares(10): n = n+x + if n != 285: + self.fail('for over growing sequence') + + result = [] + for x, in [(1,), (2,), (3,)]: + result.append(x) + self.assertEqual(result, [1, 2, 3]) + + def testTry(self): + ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] + ### | 'try' ':' suite 'finally' ':' suite + ### except_clause: 'except' [expr [('as' | ',') expr]] + try: + 1/0 + except ZeroDivisionError: + pass + else: + pass + try: 1/0 + except EOFError: pass + except TypeError as msg: pass + except RuntimeError as msg: pass + except: pass + else: pass + try: 1/0 + except (EOFError, TypeError, ZeroDivisionError): pass + try: 1/0 + except (EOFError, TypeError, ZeroDivisionError) as msg: pass + try: pass + finally: pass + + def testSuite(self): + # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT + if 1: pass + if 1: + pass + if 1: + # + # + # + pass + pass + # + pass + # + + def testTest(self): + ### and_test ('or' and_test)* + ### and_test: not_test ('and' not_test)* + ### not_test: 'not' not_test | comparison + if not 1: pass + if 1 and 1: pass + if 1 or 1: pass + if not not not 1: pass + if not 1 and 1 and 1: pass + if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass + + def testComparison(self): + ### comparison: expr (comp_op expr)* + ### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' + if 1: pass + x = (1 == 1) + if 1 == 1: pass + if 1 != 1: pass + if 1 != 1: pass + if 1 < 1: pass + if 1 > 1: pass + if 1 <= 1: pass + if 1 >= 1: pass + if 1 is 1: pass + if 1 is not 1: pass + if 1 in (): pass + if 1 not in (): pass + if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 != 1 in 1 not in 1 is 1 is not 1: pass + + def testBinaryMaskOps(self): + x = 1 & 1 + x = 1 ^ 1 + x = 1 | 1 + + def testShiftOps(self): + x = 1 << 1 + x = 1 >> 1 + x = 1 << 1 >> 1 + + def testAdditiveOps(self): + x = 1 + x = 1 + 1 + x = 1 - 1 - 1 + x = 1 - 1 + 1 - 1 + 1 + + def testMultiplicativeOps(self): + x = 1 * 1 + x = 1 / 1 + x = 1 % 1 + x = 1 / 1 * 1 % 1 + + def testUnaryOps(self): + x = +1 + x = -1 + x = ~1 + x = ~1 ^ 1 & 1 | 1 & 1 ^ -1 + x = -1*1/1 + 1*1 - ---1*1 + + def testSelectors(self): + ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME + ### subscript: expr | [expr] ':' [expr] + + import sys, time + c = sys.path[0] + x = time.time() + x = sys.modules['time'].time() + a = '01234' + c = a[0] + c = a[-1] + s = a[0:5] + s = a[:5] + s = a[0:] + s = a[:] + s = a[-5:] + s = a[:-1] + s = a[-4:-3] + # A rough test of SF bug 1333982. http://python.org/sf/1333982 + # The testing here is fairly incomplete. 
+ # Test cases should include: commas with 1 and 2 colons + d = {} + d[1] = 1 + d[1,] = 2 + d[1,2] = 3 + d[1,2,3] = 4 + L = list(d) + L.sort() + self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]') + + def testAtoms(self): + ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING + ### dictmaker: test ':' test (',' test ':' test)* [','] + + x = (1) + x = (1 or 2 or 3) + x = (1 or 2 or 3, 2, 3) + + x = [] + x = [1] + x = [1 or 2 or 3] + x = [1 or 2 or 3, 2, 3] + x = [] + + x = {} + x = {'one': 1} + x = {'one': 1,} + x = {'one' or 'two': 1 or 2} + x = {'one': 1, 'two': 2} + x = {'one': 1, 'two': 2,} + x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6} + + x = repr(x) + x = repr(1 or 2 or 3) + self.assertEqual(repr((1,2)), '(1, 2)') + + x = x + x = 'x' + x = 123 + + ### exprlist: expr (',' expr)* [','] + ### testlist: test (',' test)* [','] + # These have been exercised enough above + + def testClassdef(self): + # 'class' NAME ['(' [testlist] ')'] ':' suite + class B: pass + class B2(): pass + class C1(B): pass + class C2(B): pass + class D(C1, C2, B): pass + class C: + def meth1(self): pass + def meth2(self, arg): pass + def meth3(self, a1, a2): pass + # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE + # decorators: decorator+ + # decorated: decorators (classdef | funcdef) + def class_decorator(x): + x.decorated = True + return x + @class_decorator + class G: + pass + self.assertEqual(G.decorated, True) + + def testListcomps(self): + # list comprehension tests + nums = [1, 2, 3, 4, 5] + strs = ["Apple", "Banana", "Coconut"] + spcs = [" Apple", " Banana ", "Coco nut "] + + self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut']) + self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15]) + self.assertEqual([x for x in nums if x > 2], [3, 4, 5]) + self.assertEqual([(i, s) for i in nums for s in strs], + [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'), + (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'), + (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'), + (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'), + (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')]) + self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]], + [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'), + (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'), + (5, 'Banana'), (5, 'Coconut')]) + self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)], + [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]]) + + def test_in_func(l): + return [None < x < 3 for x in l if x > 2] + + self.assertEqual(test_in_func(nums), [False, False, False]) + + def test_nested_front(): + self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]], + [[1, 2], [3, 4], [5, 6]]) + + test_nested_front() + + check_syntax_error(self, "[i, s for i in nums for s in strs]") + check_syntax_error(self, "[x if y]") + + suppliers = [ + (1, "Boeing"), + (2, "Ford"), + (3, "Macdonalds") + ] + + parts = [ + (10, "Airliner"), + (20, "Engine"), + (30, "Cheeseburger") + ] + + suppart = [ + (1, 10), (1, 20), (2, 20), (3, 30) + ] + + x = [ + (sname, pname) + for (sno, sname) in suppliers + for (pno, pname) in parts + for (sp_sno, sp_pno) in suppart + if sno == sp_sno and pno == sp_pno + ] + + self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'), + ('Macdonalds', 'Cheeseburger')]) + + def testGenexps(self): + # generator expression tests + g = ([x for x in range(10)] for x in 
range(1)) + self.assertEqual(next(g), [x for x in range(10)]) + try: + next(g) + self.fail('should produce StopIteration exception') + except StopIteration: + pass + + a = 1 + try: + g = (a for d in a) + next(g) + self.fail('should produce TypeError') + except TypeError: + pass + + self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd']) + self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy']) + + a = [x for x in range(10)] + b = (x for x in (y for y in a)) + self.assertEqual(sum(b), sum([x for x in range(10)])) + + self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)])) + self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2])) + self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0) + check_syntax_error(self, "foo(x for x in range(10), 100)") + check_syntax_error(self, "foo(100, x for x in range(10))") + + def testComprehensionSpecials(self): + # test for outmost iterable precomputation + x = 10; g = (i for i in range(x)); x = 5 + self.assertEqual(len(list(g)), 10) + + # This should hold, since we're only precomputing outmost iterable. + x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x)) + x = 5; t = True; + self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g)) + + # Grammar allows multiple adjacent 'if's in listcomps and genexps, + # even though it's silly. Make sure it works (ifelse broke this.) + self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7]) + self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7]) + + # verify unpacking single element tuples in listcomp/genexp. 
+ self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6]) + self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9]) + + def test_with_statement(self): + class manager(object): + def __enter__(self): + return (1, 2) + def __exit__(self, *args): + pass + + with manager(): + pass + with manager() as x: + pass + with manager() as (x, y): + pass + with manager(), manager(): + pass + with manager() as x, manager() as y: + pass + with manager() as x, manager(): + pass + + def testIfElseExpr(self): + # Test ifelse expressions in various cases + def _checkeval(msg, ret): + "helper to check that evaluation of expressions is done correctly" + print(x) + return ret + + self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) + self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) + self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) + self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) + self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5) + self.assertEqual((5 and 6 if 0 else 1), 1) + self.assertEqual(((5 and 6) if 0 else 1), 1) + self.assertEqual((5 and (6 if 1 else 1)), 6) + self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3) + self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1) + self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5) + self.assertEqual((not 5 if 1 else 1), False) + self.assertEqual((not 5 if 0 else 1), 1) + self.assertEqual((6 + 1 if 1 else 2), 7) + self.assertEqual((6 - 1 if 1 else 2), 5) + self.assertEqual((6 * 2 if 1 else 4), 12) + self.assertEqual((6 / 2 if 1 else 3), 3) + self.assertEqual((6 < 4 if 0 else 2), 2) + + +def test_main(): + run_unittest(TokenTests, GrammarTests) + +if __name__ == '__main__': + test_main() diff --git a/lib3/2to3/lib2to3/tests/data/py3_test_grammar.py b/lib3/2to3/lib2to3/tests/data/py3_test_grammar.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/data/py3_test_grammar.py @@ -0,0 +1,923 @@ +# Python test set -- part 1, grammar. +# This just tests whether the parser accepts them all. + +# NOTE: When you run this test as a script from the command line, you +# get warnings about certain hex/oct constants. Since those are +# issued by the parser, you can't suppress them by adding a +# filterwarnings() call to this module. Therefore, to shut up the +# regression test, the filterwarnings() call has been added to +# regrtest.py. 
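The Python 3 counterpart of the grammar test begins here; its TokenTests exercise the literal syntax that changed between the two grammars. As a quick, illustrative reminder of those forms (not part of the test data): octal now requires the 0o prefix, binary literals use 0b, and the bare leading-zero octals accepted by Python 2 are rejected outright.

# Python 3 literal forms exercised by the TokenTests below.
assert 0o377 == 255          # octal needs the 0o prefix
assert 0b1001 == 9           # binary literals are new relative to the py2 file
assert 0xff == 255           # hex is unchanged
# '0377' (a Python 2 octal literal) would be a SyntaxError under the Python 3 grammar.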
+ +from test.support import run_unittest, check_syntax_error +import unittest +import sys +# testing import * +from sys import * + +class TokenTests(unittest.TestCase): + + def testBackslash(self): + # Backslash means line continuation: + x = 1 \ + + 1 + self.assertEquals(x, 2, 'backslash for line continuation') + + # Backslash does not means continuation in comments :\ + x = 0 + self.assertEquals(x, 0, 'backslash ending comment') + + def testPlainIntegers(self): + self.assertEquals(type(000), type(0)) + self.assertEquals(0xff, 255) + self.assertEquals(0o377, 255) + self.assertEquals(2147483647, 0o17777777777) + self.assertEquals(0b1001, 9) + # "0x" is not a valid literal + self.assertRaises(SyntaxError, eval, "0x") + from sys import maxsize + if maxsize == 2147483647: + self.assertEquals(-2147483647-1, -0o20000000000) + # XXX -2147483648 + self.assert_(0o37777777777 > 0) + self.assert_(0xffffffff > 0) + self.assert_(0b1111111111111111111111111111111 > 0) + for s in ('2147483648', '0o40000000000', '0x100000000', + '0b10000000000000000000000000000000'): + try: + x = eval(s) + except OverflowError: + self.fail("OverflowError on huge integer literal %r" % s) + elif maxsize == 9223372036854775807: + self.assertEquals(-9223372036854775807-1, -0o1000000000000000000000) + self.assert_(0o1777777777777777777777 > 0) + self.assert_(0xffffffffffffffff > 0) + self.assert_(0b11111111111111111111111111111111111111111111111111111111111111 > 0) + for s in '9223372036854775808', '0o2000000000000000000000', \ + '0x10000000000000000', \ + '0b100000000000000000000000000000000000000000000000000000000000000': + try: + x = eval(s) + except OverflowError: + self.fail("OverflowError on huge integer literal %r" % s) + else: + self.fail('Weird maxsize value %r' % maxsize) + + def testLongIntegers(self): + x = 0 + x = 0xffffffffffffffff + x = 0Xffffffffffffffff + x = 0o77777777777777777 + x = 0O77777777777777777 + x = 123456789012345678901234567890 + x = 0b100000000000000000000000000000000000000000000000000000000000000000000 + x = 0B111111111111111111111111111111111111111111111111111111111111111111111 + + def testFloats(self): + x = 3.14 + x = 314. + x = 0.314 + # XXX x = 000.314 + x = .314 + x = 3e14 + x = 3E14 + x = 3e-14 + x = 3e+14 + x = 3.e14 + x = .3e14 + x = 3.1e4 + + def testStringLiterals(self): + x = ''; y = ""; self.assert_(len(x) == 0 and x == y) + x = '\''; y = "'"; self.assert_(len(x) == 1 and x == y and ord(x) == 39) + x = '"'; y = "\""; self.assert_(len(x) == 1 and x == y and ord(x) == 34) + x = "doesn't \"shrink\" does it" + y = 'doesn\'t "shrink" does it' + self.assert_(len(x) == 24 and x == y) + x = "does \"shrink\" doesn't it" + y = 'does "shrink" doesn\'t it' + self.assert_(len(x) == 24 and x == y) + x = """ +The "quick" +brown fox +jumps over +the 'lazy' dog. +""" + y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n' + self.assertEquals(x, y) + y = ''' +The "quick" +brown fox +jumps over +the 'lazy' dog. +''' + self.assertEquals(x, y) + y = "\n\ +The \"quick\"\n\ +brown fox\n\ +jumps over\n\ +the 'lazy' dog.\n\ +" + self.assertEquals(x, y) + y = '\n\ +The \"quick\"\n\ +brown fox\n\ +jumps over\n\ +the \'lazy\' dog.\n\ +' + self.assertEquals(x, y) + + def testEllipsis(self): + x = ... + self.assert_(x is Ellipsis) + self.assertRaises(SyntaxError, eval, ".. 
.") + +class GrammarTests(unittest.TestCase): + + # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE + # XXX can't test in a script -- this rule is only used when interactive + + # file_input: (NEWLINE | stmt)* ENDMARKER + # Being tested as this very moment this very module + + # expr_input: testlist NEWLINE + # XXX Hard to test -- used only in calls to input() + + def testEvalInput(self): + # testlist ENDMARKER + x = eval('1, 0 or 1') + + def testFuncdef(self): + ### [decorators] 'def' NAME parameters ['->' test] ':' suite + ### decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE + ### decorators: decorator+ + ### parameters: '(' [typedargslist] ')' + ### typedargslist: ((tfpdef ['=' test] ',')* + ### ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef) + ### | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) + ### tfpdef: NAME [':' test] + ### varargslist: ((vfpdef ['=' test] ',')* + ### ('*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef) + ### | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) + ### vfpdef: NAME + def f1(): pass + f1() + f1(*()) + f1(*(), **{}) + def f2(one_argument): pass + def f3(two, arguments): pass + self.assertEquals(f2.__code__.co_varnames, ('one_argument',)) + self.assertEquals(f3.__code__.co_varnames, ('two', 'arguments')) + def a1(one_arg,): pass + def a2(two, args,): pass + def v0(*rest): pass + def v1(a, *rest): pass + def v2(a, b, *rest): pass + + f1() + f2(1) + f2(1,) + f3(1, 2) + f3(1, 2,) + v0() + v0(1) + v0(1,) + v0(1,2) + v0(1,2,3,4,5,6,7,8,9,0) + v1(1) + v1(1,) + v1(1,2) + v1(1,2,3) + v1(1,2,3,4,5,6,7,8,9,0) + v2(1,2) + v2(1,2,3) + v2(1,2,3,4) + v2(1,2,3,4,5,6,7,8,9,0) + + def d01(a=1): pass + d01() + d01(1) + d01(*(1,)) + d01(**{'a':2}) + def d11(a, b=1): pass + d11(1) + d11(1, 2) + d11(1, **{'b':2}) + def d21(a, b, c=1): pass + d21(1, 2) + d21(1, 2, 3) + d21(*(1, 2, 3)) + d21(1, *(2, 3)) + d21(1, 2, *(3,)) + d21(1, 2, **{'c':3}) + def d02(a=1, b=2): pass + d02() + d02(1) + d02(1, 2) + d02(*(1, 2)) + d02(1, *(2,)) + d02(1, **{'b':2}) + d02(**{'a': 1, 'b': 2}) + def d12(a, b=1, c=2): pass + d12(1) + d12(1, 2) + d12(1, 2, 3) + def d22(a, b, c=1, d=2): pass + d22(1, 2) + d22(1, 2, 3) + d22(1, 2, 3, 4) + def d01v(a=1, *rest): pass + d01v() + d01v(1) + d01v(1, 2) + d01v(*(1, 2, 3, 4)) + d01v(*(1,)) + d01v(**{'a':2}) + def d11v(a, b=1, *rest): pass + d11v(1) + d11v(1, 2) + d11v(1, 2, 3) + def d21v(a, b, c=1, *rest): pass + d21v(1, 2) + d21v(1, 2, 3) + d21v(1, 2, 3, 4) + d21v(*(1, 2, 3, 4)) + d21v(1, 2, **{'c': 3}) + def d02v(a=1, b=2, *rest): pass + d02v() + d02v(1) + d02v(1, 2) + d02v(1, 2, 3) + d02v(1, *(2, 3, 4)) + d02v(**{'a': 1, 'b': 2}) + def d12v(a, b=1, c=2, *rest): pass + d12v(1) + d12v(1, 2) + d12v(1, 2, 3) + d12v(1, 2, 3, 4) + d12v(*(1, 2, 3, 4)) + d12v(1, 2, *(3, 4, 5)) + d12v(1, *(2,), **{'c': 3}) + def d22v(a, b, c=1, d=2, *rest): pass + d22v(1, 2) + d22v(1, 2, 3) + d22v(1, 2, 3, 4) + d22v(1, 2, 3, 4, 5) + d22v(*(1, 2, 3, 4)) + d22v(1, 2, *(3, 4, 5)) + d22v(1, *(2, 3), **{'d': 4}) + + # keyword argument type tests + try: + str('x', **{b'foo':1 }) + except TypeError: + pass + else: + self.fail('Bytes should not work as keyword argument names') + # keyword only argument tests + def pos0key1(*, key): return key + pos0key1(key=100) + def pos2key2(p1, p2, *, k1, k2=100): return p1,p2,k1,k2 + pos2key2(1, 2, k1=100) + pos2key2(1, 2, k1=100, k2=200) + pos2key2(1, 2, k2=100, k1=200) + def pos2key2dict(p1, p2, *, k1=100, k2, **kwarg): return p1,p2,k1,k2,kwarg + 
pos2key2dict(1,2,k2=100,tokwarg1=100,tokwarg2=200) + pos2key2dict(1,2,tokwarg1=100,tokwarg2=200, k2=100) + + # keyword arguments after *arglist + def f(*args, **kwargs): + return args, kwargs + self.assertEquals(f(1, x=2, *[3, 4], y=5), ((1, 3, 4), + {'x':2, 'y':5})) + self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)") + self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)") + + # argument annotation tests + def f(x) -> list: pass + self.assertEquals(f.__annotations__, {'return': list}) + def f(x:int): pass + self.assertEquals(f.__annotations__, {'x': int}) + def f(*x:str): pass + self.assertEquals(f.__annotations__, {'x': str}) + def f(**x:float): pass + self.assertEquals(f.__annotations__, {'x': float}) + def f(x, y:1+2): pass + self.assertEquals(f.__annotations__, {'y': 3}) + def f(a, b:1, c:2, d): pass + self.assertEquals(f.__annotations__, {'b': 1, 'c': 2}) + def f(a, b:1, c:2, d, e:3=4, f=5, *g:6): pass + self.assertEquals(f.__annotations__, + {'b': 1, 'c': 2, 'e': 3, 'g': 6}) + def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, + **k:11) -> 12: pass + self.assertEquals(f.__annotations__, + {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9, + 'k': 11, 'return': 12}) + # Check for SF Bug #1697248 - mixing decorators and a return annotation + def null(x): return x + @null + def f(x) -> list: pass + self.assertEquals(f.__annotations__, {'return': list}) + + # test MAKE_CLOSURE with a variety of oparg's + closure = 1 + def f(): return closure + def f(x=1): return closure + def f(*, k=1): return closure + def f() -> int: return closure + + # Check ast errors in *args and *kwargs + check_syntax_error(self, "f(*g(1=2))") + check_syntax_error(self, "f(**g(1=2))") + + def testLambdef(self): + ### lambdef: 'lambda' [varargslist] ':' test + l1 = lambda : 0 + self.assertEquals(l1(), 0) + l2 = lambda : a[d] # XXX just testing the expression + l3 = lambda : [2 < x for x in [-1, 3, 0]] + self.assertEquals(l3(), [0, 1, 0]) + l4 = lambda x = lambda y = lambda z=1 : z : y() : x() + self.assertEquals(l4(), 1) + l5 = lambda x, y, z=2: x + y + z + self.assertEquals(l5(1, 2), 5) + self.assertEquals(l5(1, 2, 3), 6) + check_syntax_error(self, "lambda x: x = 2") + check_syntax_error(self, "lambda (None,): None") + l6 = lambda x, y, *, k=20: x+y+k + self.assertEquals(l6(1,2), 1+2+20) + self.assertEquals(l6(1,2,k=10), 1+2+10) + + + ### stmt: simple_stmt | compound_stmt + # Tested below + + def testSimpleStmt(self): + ### simple_stmt: small_stmt (';' small_stmt)* [';'] + x = 1; pass; del x + def foo(): + # verify statments that end with semi-colons + x = 1; pass; del x; + foo() + + ### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt + # Tested below + + def testExprStmt(self): + # (exprlist '=')* exprlist + 1 + 1, 2, 3 + x = 1 + x = 1, 2, 3 + x = y = z = 1, 2, 3 + x, y, z = 1, 2, 3 + abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4) + + check_syntax_error(self, "x + 1 = 1") + check_syntax_error(self, "a + 1 = b + 2") + + def testDelStmt(self): + # 'del' exprlist + abc = [1,2,3] + x, y, z = abc + xyz = x, y, z + + del abc + del x, y, (z, xyz) + + def testPassStmt(self): + # 'pass' + pass + + # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt + # Tested below + + def testBreakStmt(self): + # 'break' + while 1: break + + def testContinueStmt(self): + # 'continue' + i = 1 + while i: i = 0; continue + + msg = "" + while not msg: + msg = "ok" + try: + continue + msg = "continue failed to continue inside try" + except: + msg = "continue 
inside try called except block" + if msg != "ok": + self.fail(msg) + + msg = "" + while not msg: + msg = "finally block not called" + try: + continue + finally: + msg = "ok" + if msg != "ok": + self.fail(msg) + + def test_break_continue_loop(self): + # This test warrants an explanation. It is a test specifically for SF bugs + # #463359 and #462937. The bug is that a 'break' statement executed or + # exception raised inside a try/except inside a loop, *after* a continue + # statement has been executed in that loop, will cause the wrong number of + # arguments to be popped off the stack and the instruction pointer reset to + # a very small number (usually 0.) Because of this, the following test + # *must* written as a function, and the tracking vars *must* be function + # arguments with default values. Otherwise, the test will loop and loop. + + def test_inner(extra_burning_oil = 1, count=0): + big_hippo = 2 + while big_hippo: + count += 1 + try: + if extra_burning_oil and big_hippo == 1: + extra_burning_oil -= 1 + break + big_hippo -= 1 + continue + except: + raise + if count > 2 or big_hippo != 1: + self.fail("continue then break in try/except in loop broken!") + test_inner() + + def testReturn(self): + # 'return' [testlist] + def g1(): return + def g2(): return 1 + g1() + x = g2() + check_syntax_error(self, "class foo:return 1") + + def testYield(self): + check_syntax_error(self, "class foo:yield 1") + + def testRaise(self): + # 'raise' test [',' test] + try: raise RuntimeError('just testing') + except RuntimeError: pass + try: raise KeyboardInterrupt + except KeyboardInterrupt: pass + + def testImport(self): + # 'import' dotted_as_names + import sys + import time, sys + # 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names) + from time import time + from time import (time) + # not testable inside a function, but already done at top of the module + # from sys import * + from sys import path, argv + from sys import (path, argv) + from sys import (path, argv,) + + def testGlobal(self): + # 'global' NAME (',' NAME)* + global a + global a, b + global one, two, three, four, five, six, seven, eight, nine, ten + + def testNonlocal(self): + # 'nonlocal' NAME (',' NAME)* + x = 0 + y = 0 + def f(): + nonlocal x + nonlocal x, y + + def testAssert(self): + # assert_stmt: 'assert' test [',' test] + assert 1 + assert 1, 1 + assert lambda x:x + assert 1, lambda x:x+1 + try: + assert 0, "msg" + except AssertionError as e: + self.assertEquals(e.args[0], "msg") + else: + if __debug__: + self.fail("AssertionError not raised by assert 0") + + ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef + # Tested below + + def testIf(self): + # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] + if 1: pass + if 1: pass + else: pass + if 0: pass + elif 0: pass + if 0: pass + elif 0: pass + elif 0: pass + elif 0: pass + else: pass + + def testWhile(self): + # 'while' test ':' suite ['else' ':' suite] + while 0: pass + while 0: pass + else: pass + + # Issue1920: "while 0" is optimized away, + # ensure that the "else" clause is still present. 
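# --- Editorial sketch, not part of the committed test file. ---
# The Issue1920 check that follows guards against the compiler dropping the
# "else" suite together with the constant-false loop: the "else" block of a
# while statement must run whenever the loop exits without "break", even when
# the loop body never executes at all.  The same semantics in a tiny
# standalone form:
def _while_else_demo():
    while 0:                 # constant-false condition, body never runs
        marker = 1
    else:
        marker = 2           # must still be reached
    return marker

assert _while_else_demo() == 2
# --- End of editorial sketch; the committed test continues below. ---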
+ x = 0 + while 0: + x = 1 + else: + x = 2 + self.assertEquals(x, 2) + + def testFor(self): + # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite] + for i in 1, 2, 3: pass + for i, j, k in (): pass + else: pass + class Squares: + def __init__(self, max): + self.max = max + self.sofar = [] + def __len__(self): return len(self.sofar) + def __getitem__(self, i): + if not 0 <= i < self.max: raise IndexError + n = len(self.sofar) + while n <= i: + self.sofar.append(n*n) + n = n+1 + return self.sofar[i] + n = 0 + for x in Squares(10): n = n+x + if n != 285: + self.fail('for over growing sequence') + + result = [] + for x, in [(1,), (2,), (3,)]: + result.append(x) + self.assertEqual(result, [1, 2, 3]) + + def testTry(self): + ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] + ### | 'try' ':' suite 'finally' ':' suite + ### except_clause: 'except' [expr ['as' expr]] + try: + 1/0 + except ZeroDivisionError: + pass + else: + pass + try: 1/0 + except EOFError: pass + except TypeError as msg: pass + except RuntimeError as msg: pass + except: pass + else: pass + try: 1/0 + except (EOFError, TypeError, ZeroDivisionError): pass + try: 1/0 + except (EOFError, TypeError, ZeroDivisionError) as msg: pass + try: pass + finally: pass + + def testSuite(self): + # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT + if 1: pass + if 1: + pass + if 1: + # + # + # + pass + pass + # + pass + # + + def testTest(self): + ### and_test ('or' and_test)* + ### and_test: not_test ('and' not_test)* + ### not_test: 'not' not_test | comparison + if not 1: pass + if 1 and 1: pass + if 1 or 1: pass + if not not not 1: pass + if not 1 and 1 and 1: pass + if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass + + def testComparison(self): + ### comparison: expr (comp_op expr)* + ### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not' + if 1: pass + x = (1 == 1) + if 1 == 1: pass + if 1 != 1: pass + if 1 < 1: pass + if 1 > 1: pass + if 1 <= 1: pass + if 1 >= 1: pass + if 1 is 1: pass + if 1 is not 1: pass + if 1 in (): pass + if 1 not in (): pass + if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass + + def testBinaryMaskOps(self): + x = 1 & 1 + x = 1 ^ 1 + x = 1 | 1 + + def testShiftOps(self): + x = 1 << 1 + x = 1 >> 1 + x = 1 << 1 >> 1 + + def testAdditiveOps(self): + x = 1 + x = 1 + 1 + x = 1 - 1 - 1 + x = 1 - 1 + 1 - 1 + 1 + + def testMultiplicativeOps(self): + x = 1 * 1 + x = 1 / 1 + x = 1 % 1 + x = 1 / 1 * 1 % 1 + + def testUnaryOps(self): + x = +1 + x = -1 + x = ~1 + x = ~1 ^ 1 & 1 | 1 & 1 ^ -1 + x = -1*1/1 + 1*1 - ---1*1 + + def testSelectors(self): + ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME + ### subscript: expr | [expr] ':' [expr] + + import sys, time + c = sys.path[0] + x = time.time() + x = sys.modules['time'].time() + a = '01234' + c = a[0] + c = a[-1] + s = a[0:5] + s = a[:5] + s = a[0:] + s = a[:] + s = a[-5:] + s = a[:-1] + s = a[-4:-3] + # A rough test of SF bug 1333982. http://python.org/sf/1333982 + # The testing here is fairly incomplete. 
+ # Test cases should include: commas with 1 and 2 colons + d = {} + d[1] = 1 + d[1,] = 2 + d[1,2] = 3 + d[1,2,3] = 4 + L = list(d) + L.sort(key=lambda x: x if isinstance(x, tuple) else ()) + self.assertEquals(str(L), '[1, (1,), (1, 2), (1, 2, 3)]') + + def testAtoms(self): + ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictsetmaker] '}' | NAME | NUMBER | STRING + ### dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [',']) + + x = (1) + x = (1 or 2 or 3) + x = (1 or 2 or 3, 2, 3) + + x = [] + x = [1] + x = [1 or 2 or 3] + x = [1 or 2 or 3, 2, 3] + x = [] + + x = {} + x = {'one': 1} + x = {'one': 1,} + x = {'one' or 'two': 1 or 2} + x = {'one': 1, 'two': 2} + x = {'one': 1, 'two': 2,} + x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6} + + x = {'one'} + x = {'one', 1,} + x = {'one', 'two', 'three'} + x = {2, 3, 4,} + + x = x + x = 'x' + x = 123 + + ### exprlist: expr (',' expr)* [','] + ### testlist: test (',' test)* [','] + # These have been exercised enough above + + def testClassdef(self): + # 'class' NAME ['(' [testlist] ')'] ':' suite + class B: pass + class B2(): pass + class C1(B): pass + class C2(B): pass + class D(C1, C2, B): pass + class C: + def meth1(self): pass + def meth2(self, arg): pass + def meth3(self, a1, a2): pass + + # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE + # decorators: decorator+ + # decorated: decorators (classdef | funcdef) + def class_decorator(x): return x + @class_decorator + class G: pass + + def testDictcomps(self): + # dictorsetmaker: ( (test ':' test (comp_for | + # (',' test ':' test)* [','])) | + # (test (comp_for | (',' test)* [','])) ) + nums = [1, 2, 3] + self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4}) + + def testListcomps(self): + # list comprehension tests + nums = [1, 2, 3, 4, 5] + strs = ["Apple", "Banana", "Coconut"] + spcs = [" Apple", " Banana ", "Coco nut "] + + self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut']) + self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15]) + self.assertEqual([x for x in nums if x > 2], [3, 4, 5]) + self.assertEqual([(i, s) for i in nums for s in strs], + [(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'), + (2, 'Apple'), (2, 'Banana'), (2, 'Coconut'), + (3, 'Apple'), (3, 'Banana'), (3, 'Coconut'), + (4, 'Apple'), (4, 'Banana'), (4, 'Coconut'), + (5, 'Apple'), (5, 'Banana'), (5, 'Coconut')]) + self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]], + [(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'), + (3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'), + (5, 'Banana'), (5, 'Coconut')]) + self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)], + [[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]]) + + def test_in_func(l): + return [0 < x < 3 for x in l if x > 2] + + self.assertEqual(test_in_func(nums), [False, False, False]) + + def test_nested_front(): + self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]], + [[1, 2], [3, 4], [5, 6]]) + + test_nested_front() + + check_syntax_error(self, "[i, s for i in nums for s in strs]") + check_syntax_error(self, "[x if y]") + + suppliers = [ + (1, "Boeing"), + (2, "Ford"), + (3, "Macdonalds") + ] + + parts = [ + (10, "Airliner"), + (20, "Engine"), + (30, "Cheeseburger") + ] + + suppart = [ + (1, 10), (1, 20), (2, 20), (3, 30) + ] + + x = [ + (sname, pname) + for (sno, sname) in suppliers + for (pno, pname) in parts + for (sp_sno, sp_pno) in suppart + if sno == sp_sno and pno == 
sp_pno + ] + + self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'), + ('Macdonalds', 'Cheeseburger')]) + + def testGenexps(self): + # generator expression tests + g = ([x for x in range(10)] for x in range(1)) + self.assertEqual(next(g), [x for x in range(10)]) + try: + next(g) + self.fail('should produce StopIteration exception') + except StopIteration: + pass + + a = 1 + try: + g = (a for d in a) + next(g) + self.fail('should produce TypeError') + except TypeError: + pass + + self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd']) + self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy']) + + a = [x for x in range(10)] + b = (x for x in (y for y in a)) + self.assertEqual(sum(b), sum([x for x in range(10)])) + + self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)])) + self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2])) + self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)])) + self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0) + check_syntax_error(self, "foo(x for x in range(10), 100)") + check_syntax_error(self, "foo(100, x for x in range(10))") + + def testComprehensionSpecials(self): + # test for outmost iterable precomputation + x = 10; g = (i for i in range(x)); x = 5 + self.assertEqual(len(list(g)), 10) + + # This should hold, since we're only precomputing outmost iterable. + x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x)) + x = 5; t = True; + self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g)) + + # Grammar allows multiple adjacent 'if's in listcomps and genexps, + # even though it's silly. Make sure it works (ifelse broke this.) + self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7]) + self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7]) + + # verify unpacking single element tuples in listcomp/genexp. 
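# --- Editorial sketch, not part of the committed test file. ---
# (The single-element-tuple unpacking assertions announced above continue
# immediately below.)  The testComprehensionSpecials checks a few lines up
# depend on a generator-expression rule worth spelling out: only the
# outermost iterable is evaluated when the genexp is created; everything
# else is evaluated lazily, each time the generator is advanced.
def _genexp_precompute_demo():
    n = 3
    g = (i * scale for i in range(n))   # range(3) is evaluated right here
    n = 100                             # too late: g already holds range(3)
    scale = 10                          # read lazily, so this value is used
    return list(g)

assert _genexp_precompute_demo() == [0, 10, 20]
# --- End of editorial sketch; the committed test continues below. ---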
+ self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6]) + self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9]) + + def test_with_statement(self): + class manager(object): + def __enter__(self): + return (1, 2) + def __exit__(self, *args): + pass + + with manager(): + pass + with manager() as x: + pass + with manager() as (x, y): + pass + with manager(), manager(): + pass + with manager() as x, manager() as y: + pass + with manager() as x, manager(): + pass + + def testIfElseExpr(self): + # Test ifelse expressions in various cases + def _checkeval(msg, ret): + "helper to check that evaluation of expressions is done correctly" + print(x) + return ret + + # the next line is not allowed anymore + #self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) + self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) + self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) + self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) + self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5) + self.assertEqual((5 and 6 if 0 else 1), 1) + self.assertEqual(((5 and 6) if 0 else 1), 1) + self.assertEqual((5 and (6 if 1 else 1)), 6) + self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3) + self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1) + self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5) + self.assertEqual((not 5 if 1 else 1), False) + self.assertEqual((not 5 if 0 else 1), 1) + self.assertEqual((6 + 1 if 1 else 2), 7) + self.assertEqual((6 - 1 if 1 else 2), 5) + self.assertEqual((6 * 2 if 1 else 4), 12) + self.assertEqual((6 / 2 if 1 else 3), 3) + self.assertEqual((6 < 4 if 0 else 2), 2) + + +def test_main(): + run_unittest(TokenTests, GrammarTests) + +if __name__ == '__main__': + test_main() diff --git a/lib3/2to3/lib2to3/tests/pytree_idempotency.py b/lib3/2to3/lib2to3/tests/pytree_idempotency.py new file mode 100755 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/pytree_idempotency.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Main program for testing the infrastructure.""" + +__author__ = "Guido van Rossum " + +# Support imports (need to be imported first) +from . import support + +# Python imports +import os +import sys +import logging + +# Local imports +from .. 
import pytree +import pgen2 +from pgen2 import driver + +logging.basicConfig() + +def main(): + gr = driver.load_grammar("Grammar.txt") + dr = driver.Driver(gr, convert=pytree.convert) + + fn = "example.py" + tree = dr.parse_file(fn, debug=True) + if not diff(fn, tree): + print("No diffs.") + if not sys.argv[1:]: + return # Pass a dummy argument to run the complete test suite below + + problems = [] + + # Process every imported module + for name in sys.modules: + mod = sys.modules[name] + if mod is None or not hasattr(mod, "__file__"): + continue + fn = mod.__file__ + if fn.endswith(".pyc"): + fn = fn[:-1] + if not fn.endswith(".py"): + continue + print("Parsing", fn, file=sys.stderr) + tree = dr.parse_file(fn, debug=True) + if diff(fn, tree): + problems.append(fn) + + # Process every single module on sys.path (but not in packages) + for dir in sys.path: + try: + names = os.listdir(dir) + except os.error: + continue + print("Scanning", dir, "...", file=sys.stderr) + for name in names: + if not name.endswith(".py"): + continue + print("Parsing", name, file=sys.stderr) + fn = os.path.join(dir, name) + try: + tree = dr.parse_file(fn, debug=True) + except pgen2.parse.ParseError as err: + print("ParseError:", err) + else: + if diff(fn, tree): + problems.append(fn) + + # Show summary of problem files + if not problems: + print("No problems. Congratulations!") + else: + print("Problems in following files:") + for fn in problems: + print("***", fn) + +def diff(fn, tree): + f = open("@", "w") + try: + f.write(str(tree)) + finally: + f.close() + try: + return os.system("diff -u %s @" % fn) + finally: + os.remove("@") + +if __name__ == "__main__": + main() diff --git a/lib3/2to3/lib2to3/tests/support.py b/lib3/2to3/lib2to3/tests/support.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/support.py @@ -0,0 +1,54 @@ +"""Support code for test_*.py files""" +# Author: Collin Winter + +# Python imports +import unittest +import sys +import os +import os.path +import re +from textwrap import dedent + +# Local imports +from lib2to3 import pytree, refactor +from lib2to3.pgen2 import driver + +test_dir = os.path.dirname(__file__) +proj_dir = os.path.normpath(os.path.join(test_dir, "..")) +grammar_path = os.path.join(test_dir, "..", "Grammar.txt") +grammar = driver.load_grammar(grammar_path) +driver = driver.Driver(grammar, convert=pytree.convert) + +def parse_string(string): + return driver.parse_string(reformat(string), debug=True) + +def run_all_tests(test_mod=None, tests=None): + if tests is None: + tests = unittest.TestLoader().loadTestsFromModule(test_mod) + unittest.TextTestRunner(verbosity=2).run(tests) + +def reformat(string): + return dedent(string) + "\n\n" + +def get_refactorer(fixer_pkg="lib2to3", fixers=None, options=None): + """ + A convenience function for creating a RefactoringTool for tests. + + fixers is a list of fixers for the RefactoringTool to use. By default + "lib2to3.fixes.*" is used. options is an optional dictionary of options to + be passed to the RefactoringTool. 
+ """ + if fixers is not None: + fixers = [fixer_pkg + ".fixes.fix_" + fix for fix in fixers] + else: + fixers = refactor.get_fixers_from_package(fixer_pkg + ".fixes") + options = options or {} + return refactor.RefactoringTool(fixers, options, explicit=True) + +def all_project_files(): + for dirpath, dirnames, filenames in os.walk(proj_dir): + for filename in filenames: + if filename.endswith(".py"): + yield os.path.join(dirpath, filename) + +TestCase = unittest.TestCase diff --git a/lib3/2to3/lib2to3/tests/test_all_fixers.py b/lib3/2to3/lib2to3/tests/test_all_fixers.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/test_all_fixers.py @@ -0,0 +1,23 @@ +"""Tests that run all fixer modules over an input stream. + +This has been broken out into its own test module because of its +running time. +""" +# Author: Collin Winter + +# Python imports +import unittest + +# Local imports +from lib2to3 import refactor +from . import support + + +class Test_all(support.TestCase): + + def setUp(self): + self.refactor = support.get_refactorer() + + def test_all_project_files(self): + for filepath in support.all_project_files(): + self.refactor.refactor_file(filepath) diff --git a/lib3/2to3/lib2to3/tests/test_fixers.py b/lib3/2to3/lib2to3/tests/test_fixers.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/test_fixers.py @@ -0,0 +1,4515 @@ +""" Test suite for the fixer modules """ + +# Python imports +import os +import unittest +from itertools import chain +from operator import itemgetter + +# Local imports +from lib2to3 import pygram, pytree, refactor, fixer_util +from lib2to3.tests import support + + +class FixerTestCase(support.TestCase): + + # Other test cases can subclass this class and replace "fixer_pkg" with + # their own. 
+ def setUp(self, fix_list=None, fixer_pkg="lib2to3", options=None): + if fix_list is None: + fix_list = [self.fixer] + self.refactor = support.get_refactorer(fixer_pkg, fix_list, options) + self.fixer_log = [] + self.filename = "" + + for fixer in chain(self.refactor.pre_order, + self.refactor.post_order): + fixer.log = self.fixer_log + + def _check(self, before, after): + before = support.reformat(before) + after = support.reformat(after) + tree = self.refactor.refactor_string(before, self.filename) + self.assertEqual(after, str(tree)) + return tree + + def check(self, before, after, ignore_warnings=False): + tree = self._check(before, after) + self.assertTrue(tree.was_changed) + if not ignore_warnings: + self.assertEqual(self.fixer_log, []) + + def warns(self, before, after, message, unchanged=False): + tree = self._check(before, after) + self.assertTrue(message in "".join(self.fixer_log)) + if not unchanged: + self.assertTrue(tree.was_changed) + + def warns_unchanged(self, before, message): + self.warns(before, before, message, unchanged=True) + + def unchanged(self, before, ignore_warnings=False): + self._check(before, before) + if not ignore_warnings: + self.assertEqual(self.fixer_log, []) + + def assert_runs_after(self, *names): + fixes = [self.fixer] + fixes.extend(names) + r = support.get_refactorer("lib2to3", fixes) + (pre, post) = r.get_fixers() + n = "fix_" + self.fixer + if post and post[-1].__class__.__module__.endswith(n): + # We're the last fixer to run + return + if pre and pre[-1].__class__.__module__.endswith(n) and not post: + # We're the last in pre and post is empty + return + self.fail("Fixer run order (%s) is incorrect; %s should be last."\ + %(", ".join([x.__class__.__module__ for x in (pre+post)]), n)) + +class Test_ne(FixerTestCase): + fixer = "ne" + + def test_basic(self): + b = """if x <> y: + pass""" + + a = """if x != y: + pass""" + self.check(b, a) + + def test_no_spaces(self): + b = """if x<>y: + pass""" + + a = """if x!=y: + pass""" + self.check(b, a) + + def test_chained(self): + b = """if x<>y<>z: + pass""" + + a = """if x!=y!=z: + pass""" + self.check(b, a) + +class Test_has_key(FixerTestCase): + fixer = "has_key" + + def test_1(self): + b = """x = d.has_key("x") or d.has_key("y")""" + a = """x = "x" in d or "y" in d""" + self.check(b, a) + + def test_2(self): + b = """x = a.b.c.d.has_key("x") ** 3""" + a = """x = ("x" in a.b.c.d) ** 3""" + self.check(b, a) + + def test_3(self): + b = """x = a.b.has_key(1 + 2).__repr__()""" + a = """x = (1 + 2 in a.b).__repr__()""" + self.check(b, a) + + def test_4(self): + b = """x = a.b.has_key(1 + 2).__repr__() ** -3 ** 4""" + a = """x = (1 + 2 in a.b).__repr__() ** -3 ** 4""" + self.check(b, a) + + def test_5(self): + b = """x = a.has_key(f or g)""" + a = """x = (f or g) in a""" + self.check(b, a) + + def test_6(self): + b = """x = a + b.has_key(c)""" + a = """x = a + (c in b)""" + self.check(b, a) + + def test_7(self): + b = """x = a.has_key(lambda: 12)""" + a = """x = (lambda: 12) in a""" + self.check(b, a) + + def test_8(self): + b = """x = a.has_key(a for a in b)""" + a = """x = (a for a in b) in a""" + self.check(b, a) + + def test_9(self): + b = """if not a.has_key(b): pass""" + a = """if b not in a: pass""" + self.check(b, a) + + def test_10(self): + b = """if not a.has_key(b).__repr__(): pass""" + a = """if not (b in a).__repr__(): pass""" + self.check(b, a) + + def test_11(self): + b = """if not a.has_key(b) ** 2: pass""" + a = """if not (b in a) ** 2: pass""" + self.check(b, a) + +class 
Test_apply(FixerTestCase): + fixer = "apply" + + def test_1(self): + b = """x = apply(f, g + h)""" + a = """x = f(*g + h)""" + self.check(b, a) + + def test_2(self): + b = """y = apply(f, g, h)""" + a = """y = f(*g, **h)""" + self.check(b, a) + + def test_3(self): + b = """z = apply(fs[0], g or h, h or g)""" + a = """z = fs[0](*g or h, **h or g)""" + self.check(b, a) + + def test_4(self): + b = """apply(f, (x, y) + t)""" + a = """f(*(x, y) + t)""" + self.check(b, a) + + def test_5(self): + b = """apply(f, args,)""" + a = """f(*args)""" + self.check(b, a) + + def test_6(self): + b = """apply(f, args, kwds,)""" + a = """f(*args, **kwds)""" + self.check(b, a) + + # Test that complex functions are parenthesized + + def test_complex_1(self): + b = """x = apply(f+g, args)""" + a = """x = (f+g)(*args)""" + self.check(b, a) + + def test_complex_2(self): + b = """x = apply(f*g, args)""" + a = """x = (f*g)(*args)""" + self.check(b, a) + + def test_complex_3(self): + b = """x = apply(f**g, args)""" + a = """x = (f**g)(*args)""" + self.check(b, a) + + # But dotted names etc. not + + def test_dotted_name(self): + b = """x = apply(f.g, args)""" + a = """x = f.g(*args)""" + self.check(b, a) + + def test_subscript(self): + b = """x = apply(f[x], args)""" + a = """x = f[x](*args)""" + self.check(b, a) + + def test_call(self): + b = """x = apply(f(), args)""" + a = """x = f()(*args)""" + self.check(b, a) + + # Extreme case + def test_extreme(self): + b = """x = apply(a.b.c.d.e.f, args, kwds)""" + a = """x = a.b.c.d.e.f(*args, **kwds)""" + self.check(b, a) + + # XXX Comments in weird places still get lost + def test_weird_comments(self): + b = """apply( # foo + f, # bar + args)""" + a = """f(*args)""" + self.check(b, a) + + # These should *not* be touched + + def test_unchanged_1(self): + s = """apply()""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """apply(f)""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """apply(f,)""" + self.unchanged(s) + + def test_unchanged_4(self): + s = """apply(f, args, kwds, extras)""" + self.unchanged(s) + + def test_unchanged_5(self): + s = """apply(f, *args, **kwds)""" + self.unchanged(s) + + def test_unchanged_6(self): + s = """apply(f, *args)""" + self.unchanged(s) + + def test_unchanged_7(self): + s = """apply(func=f, args=args, kwds=kwds)""" + self.unchanged(s) + + def test_unchanged_8(self): + s = """apply(f, args=args, kwds=kwds)""" + self.unchanged(s) + + def test_unchanged_9(self): + s = """apply(f, args, kwds=kwds)""" + self.unchanged(s) + + def test_space_1(self): + a = """apply( f, args, kwds)""" + b = """f(*args, **kwds)""" + self.check(a, b) + + def test_space_2(self): + a = """apply( f ,args,kwds )""" + b = """f(*args, **kwds)""" + self.check(a, b) + +class Test_intern(FixerTestCase): + fixer = "intern" + + def test_prefix_preservation(self): + b = """x = intern( a )""" + a = """import sys\nx = sys.intern( a )""" + self.check(b, a) + + b = """y = intern("b" # test + )""" + a = """import sys\ny = sys.intern("b" # test + )""" + self.check(b, a) + + b = """z = intern(a+b+c.d, )""" + a = """import sys\nz = sys.intern(a+b+c.d, )""" + self.check(b, a) + + def test(self): + b = """x = intern(a)""" + a = """import sys\nx = sys.intern(a)""" + self.check(b, a) + + b = """z = intern(a+b+c.d,)""" + a = """import sys\nz = sys.intern(a+b+c.d,)""" + self.check(b, a) + + b = """intern("y%s" % 5).replace("y", "")""" + a = """import sys\nsys.intern("y%s" % 5).replace("y", "")""" + self.check(b, a) + + # These should not be refactored + + def 
test_unchanged(self): + s = """intern(a=1)""" + self.unchanged(s) + + s = """intern(f, g)""" + self.unchanged(s) + + s = """intern(*h)""" + self.unchanged(s) + + s = """intern(**i)""" + self.unchanged(s) + + s = """intern()""" + self.unchanged(s) + +class Test_reduce(FixerTestCase): + fixer = "reduce" + + def test_simple_call(self): + b = "reduce(a, b, c)" + a = "from functools import reduce\nreduce(a, b, c)" + self.check(b, a) + + def test_bug_7253(self): + # fix_tuple_params was being bad and orphaning nodes in the tree. + b = "def x(arg): reduce(sum, [])" + a = "from functools import reduce\ndef x(arg): reduce(sum, [])" + self.check(b, a) + + def test_call_with_lambda(self): + b = "reduce(lambda x, y: x + y, seq)" + a = "from functools import reduce\nreduce(lambda x, y: x + y, seq)" + self.check(b, a) + + def test_unchanged(self): + s = "reduce(a)" + self.unchanged(s) + + s = "reduce(a, b=42)" + self.unchanged(s) + + s = "reduce(a, b, c, d)" + self.unchanged(s) + + s = "reduce(**c)" + self.unchanged(s) + + s = "reduce()" + self.unchanged(s) + +class Test_print(FixerTestCase): + fixer = "print" + + def test_prefix_preservation(self): + b = """print 1, 1+1, 1+1+1""" + a = """print(1, 1+1, 1+1+1)""" + self.check(b, a) + + def test_idempotency(self): + s = """print()""" + self.unchanged(s) + + s = """print('')""" + self.unchanged(s) + + def test_idempotency_print_as_function(self): + self.refactor.driver.grammar = pygram.python_grammar_no_print_statement + s = """print(1, 1+1, 1+1+1)""" + self.unchanged(s) + + s = """print()""" + self.unchanged(s) + + s = """print('')""" + self.unchanged(s) + + def test_1(self): + b = """print 1, 1+1, 1+1+1""" + a = """print(1, 1+1, 1+1+1)""" + self.check(b, a) + + def test_2(self): + b = """print 1, 2""" + a = """print(1, 2)""" + self.check(b, a) + + def test_3(self): + b = """print""" + a = """print()""" + self.check(b, a) + + def test_4(self): + # from bug 3000 + b = """print whatever; print""" + a = """print(whatever); print()""" + self.check(b, a) + + def test_5(self): + b = """print; print whatever;""" + a = """print(); print(whatever);""" + self.check(b, a) + + def test_tuple(self): + b = """print (a, b, c)""" + a = """print((a, b, c))""" + self.check(b, a) + + # trailing commas + + def test_trailing_comma_1(self): + b = """print 1, 2, 3,""" + a = """print(1, 2, 3, end=' ')""" + self.check(b, a) + + def test_trailing_comma_2(self): + b = """print 1, 2,""" + a = """print(1, 2, end=' ')""" + self.check(b, a) + + def test_trailing_comma_3(self): + b = """print 1,""" + a = """print(1, end=' ')""" + self.check(b, a) + + # >> stuff + + def test_vargs_without_trailing_comma(self): + b = """print >>sys.stderr, 1, 2, 3""" + a = """print(1, 2, 3, file=sys.stderr)""" + self.check(b, a) + + def test_with_trailing_comma(self): + b = """print >>sys.stderr, 1, 2,""" + a = """print(1, 2, end=' ', file=sys.stderr)""" + self.check(b, a) + + def test_no_trailing_comma(self): + b = """print >>sys.stderr, 1+1""" + a = """print(1+1, file=sys.stderr)""" + self.check(b, a) + + def test_spaces_before_file(self): + b = """print >> sys.stderr""" + a = """print(file=sys.stderr)""" + self.check(b, a) + + def test_with_future_print_function(self): + s = "from __future__ import print_function\n" \ + "print('Hai!', end=' ')" + self.unchanged(s) + + b = "print 'Hello, world!'" + a = "print('Hello, world!')" + self.check(b, a) + + +class Test_exec(FixerTestCase): + fixer = "exec" + + def test_prefix_preservation(self): + b = """ exec code in ns1, ns2""" + a = """ exec(code, ns1, 
ns2)""" + self.check(b, a) + + def test_basic(self): + b = """exec code""" + a = """exec(code)""" + self.check(b, a) + + def test_with_globals(self): + b = """exec code in ns""" + a = """exec(code, ns)""" + self.check(b, a) + + def test_with_globals_locals(self): + b = """exec code in ns1, ns2""" + a = """exec(code, ns1, ns2)""" + self.check(b, a) + + def test_complex_1(self): + b = """exec (a.b()) in ns""" + a = """exec((a.b()), ns)""" + self.check(b, a) + + def test_complex_2(self): + b = """exec a.b() + c in ns""" + a = """exec(a.b() + c, ns)""" + self.check(b, a) + + # These should not be touched + + def test_unchanged_1(self): + s = """exec(code)""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """exec (code)""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """exec(code, ns)""" + self.unchanged(s) + + def test_unchanged_4(self): + s = """exec(code, ns1, ns2)""" + self.unchanged(s) + +class Test_repr(FixerTestCase): + fixer = "repr" + + def test_prefix_preservation(self): + b = """x = `1 + 2`""" + a = """x = repr(1 + 2)""" + self.check(b, a) + + def test_simple_1(self): + b = """x = `1 + 2`""" + a = """x = repr(1 + 2)""" + self.check(b, a) + + def test_simple_2(self): + b = """y = `x`""" + a = """y = repr(x)""" + self.check(b, a) + + def test_complex(self): + b = """z = `y`.__repr__()""" + a = """z = repr(y).__repr__()""" + self.check(b, a) + + def test_tuple(self): + b = """x = `1, 2, 3`""" + a = """x = repr((1, 2, 3))""" + self.check(b, a) + + def test_nested(self): + b = """x = `1 + `2``""" + a = """x = repr(1 + repr(2))""" + self.check(b, a) + + def test_nested_tuples(self): + b = """x = `1, 2 + `3, 4``""" + a = """x = repr((1, 2 + repr((3, 4))))""" + self.check(b, a) + +class Test_except(FixerTestCase): + fixer = "except" + + def test_prefix_preservation(self): + b = """ + try: + pass + except (RuntimeError, ImportError), e: + pass""" + a = """ + try: + pass + except (RuntimeError, ImportError) as e: + pass""" + self.check(b, a) + + def test_simple(self): + b = """ + try: + pass + except Foo, e: + pass""" + a = """ + try: + pass + except Foo as e: + pass""" + self.check(b, a) + + def test_simple_no_space_before_target(self): + b = """ + try: + pass + except Foo,e: + pass""" + a = """ + try: + pass + except Foo as e: + pass""" + self.check(b, a) + + def test_tuple_unpack(self): + b = """ + def foo(): + try: + pass + except Exception, (f, e): + pass + except ImportError, e: + pass""" + + a = """ + def foo(): + try: + pass + except Exception as xxx_todo_changeme: + (f, e) = xxx_todo_changeme.args + pass + except ImportError as e: + pass""" + self.check(b, a) + + def test_multi_class(self): + b = """ + try: + pass + except (RuntimeError, ImportError), e: + pass""" + + a = """ + try: + pass + except (RuntimeError, ImportError) as e: + pass""" + self.check(b, a) + + def test_list_unpack(self): + b = """ + try: + pass + except Exception, [a, b]: + pass""" + + a = """ + try: + pass + except Exception as xxx_todo_changeme: + [a, b] = xxx_todo_changeme.args + pass""" + self.check(b, a) + + def test_weird_target_1(self): + b = """ + try: + pass + except Exception, d[5]: + pass""" + + a = """ + try: + pass + except Exception as xxx_todo_changeme: + d[5] = xxx_todo_changeme + pass""" + self.check(b, a) + + def test_weird_target_2(self): + b = """ + try: + pass + except Exception, a.foo: + pass""" + + a = """ + try: + pass + except Exception as xxx_todo_changeme: + a.foo = xxx_todo_changeme + pass""" + self.check(b, a) + + def test_weird_target_3(self): + b = """ + try: 
+ pass + except Exception, a().foo: + pass""" + + a = """ + try: + pass + except Exception as xxx_todo_changeme: + a().foo = xxx_todo_changeme + pass""" + self.check(b, a) + + def test_bare_except(self): + b = """ + try: + pass + except Exception, a: + pass + except: + pass""" + + a = """ + try: + pass + except Exception as a: + pass + except: + pass""" + self.check(b, a) + + def test_bare_except_and_else_finally(self): + b = """ + try: + pass + except Exception, a: + pass + except: + pass + else: + pass + finally: + pass""" + + a = """ + try: + pass + except Exception as a: + pass + except: + pass + else: + pass + finally: + pass""" + self.check(b, a) + + def test_multi_fixed_excepts_before_bare_except(self): + b = """ + try: + pass + except TypeError, b: + pass + except Exception, a: + pass + except: + pass""" + + a = """ + try: + pass + except TypeError as b: + pass + except Exception as a: + pass + except: + pass""" + self.check(b, a) + + def test_one_line_suites(self): + b = """ + try: raise TypeError + except TypeError, e: + pass + """ + a = """ + try: raise TypeError + except TypeError as e: + pass + """ + self.check(b, a) + b = """ + try: + raise TypeError + except TypeError, e: pass + """ + a = """ + try: + raise TypeError + except TypeError as e: pass + """ + self.check(b, a) + b = """ + try: raise TypeError + except TypeError, e: pass + """ + a = """ + try: raise TypeError + except TypeError as e: pass + """ + self.check(b, a) + b = """ + try: raise TypeError + except TypeError, e: pass + else: function() + finally: done() + """ + a = """ + try: raise TypeError + except TypeError as e: pass + else: function() + finally: done() + """ + self.check(b, a) + + # These should not be touched: + + def test_unchanged_1(self): + s = """ + try: + pass + except: + pass""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """ + try: + pass + except Exception: + pass""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """ + try: + pass + except (Exception, SystemExit): + pass""" + self.unchanged(s) + +class Test_raise(FixerTestCase): + fixer = "raise" + + def test_basic(self): + b = """raise Exception, 5""" + a = """raise Exception(5)""" + self.check(b, a) + + def test_prefix_preservation(self): + b = """raise Exception,5""" + a = """raise Exception(5)""" + self.check(b, a) + + b = """raise Exception, 5""" + a = """raise Exception(5)""" + self.check(b, a) + + def test_with_comments(self): + b = """raise Exception, 5 # foo""" + a = """raise Exception(5) # foo""" + self.check(b, a) + + b = """raise E, (5, 6) % (a, b) # foo""" + a = """raise E((5, 6) % (a, b)) # foo""" + self.check(b, a) + + b = """def foo(): + raise Exception, 5, 6 # foo""" + a = """def foo(): + raise Exception(5).with_traceback(6) # foo""" + self.check(b, a) + + def test_None_value(self): + b = """raise Exception(5), None, tb""" + a = """raise Exception(5).with_traceback(tb)""" + self.check(b, a) + + def test_tuple_value(self): + b = """raise Exception, (5, 6, 7)""" + a = """raise Exception(5, 6, 7)""" + self.check(b, a) + + def test_tuple_detection(self): + b = """raise E, (5, 6) % (a, b)""" + a = """raise E((5, 6) % (a, b))""" + self.check(b, a) + + def test_tuple_exc_1(self): + b = """raise (((E1, E2), E3), E4), V""" + a = """raise E1(V)""" + self.check(b, a) + + def test_tuple_exc_2(self): + b = """raise (E1, (E2, E3), E4), V""" + a = """raise E1(V)""" + self.check(b, a) + + # These should produce a warning + + def test_string_exc(self): + s = """raise 'foo'""" + self.warns_unchanged(s, "Python 3 does not 
support string exceptions") + + def test_string_exc_val(self): + s = """raise "foo", 5""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + def test_string_exc_val_tb(self): + s = """raise "foo", 5, 6""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + # These should result in traceback-assignment + + def test_tb_1(self): + b = """def foo(): + raise Exception, 5, 6""" + a = """def foo(): + raise Exception(5).with_traceback(6)""" + self.check(b, a) + + def test_tb_2(self): + b = """def foo(): + a = 5 + raise Exception, 5, 6 + b = 6""" + a = """def foo(): + a = 5 + raise Exception(5).with_traceback(6) + b = 6""" + self.check(b, a) + + def test_tb_3(self): + b = """def foo(): + raise Exception,5,6""" + a = """def foo(): + raise Exception(5).with_traceback(6)""" + self.check(b, a) + + def test_tb_4(self): + b = """def foo(): + a = 5 + raise Exception,5,6 + b = 6""" + a = """def foo(): + a = 5 + raise Exception(5).with_traceback(6) + b = 6""" + self.check(b, a) + + def test_tb_5(self): + b = """def foo(): + raise Exception, (5, 6, 7), 6""" + a = """def foo(): + raise Exception(5, 6, 7).with_traceback(6)""" + self.check(b, a) + + def test_tb_6(self): + b = """def foo(): + a = 5 + raise Exception, (5, 6, 7), 6 + b = 6""" + a = """def foo(): + a = 5 + raise Exception(5, 6, 7).with_traceback(6) + b = 6""" + self.check(b, a) + +class Test_throw(FixerTestCase): + fixer = "throw" + + def test_1(self): + b = """g.throw(Exception, 5)""" + a = """g.throw(Exception(5))""" + self.check(b, a) + + def test_2(self): + b = """g.throw(Exception,5)""" + a = """g.throw(Exception(5))""" + self.check(b, a) + + def test_3(self): + b = """g.throw(Exception, (5, 6, 7))""" + a = """g.throw(Exception(5, 6, 7))""" + self.check(b, a) + + def test_4(self): + b = """5 + g.throw(Exception, 5)""" + a = """5 + g.throw(Exception(5))""" + self.check(b, a) + + # These should produce warnings + + def test_warn_1(self): + s = """g.throw("foo")""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + def test_warn_2(self): + s = """g.throw("foo", 5)""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + def test_warn_3(self): + s = """g.throw("foo", 5, 6)""" + self.warns_unchanged(s, "Python 3 does not support string exceptions") + + # These should not be touched + + def test_untouched_1(self): + s = """g.throw(Exception)""" + self.unchanged(s) + + def test_untouched_2(self): + s = """g.throw(Exception(5, 6))""" + self.unchanged(s) + + def test_untouched_3(self): + s = """5 + g.throw(Exception(5, 6))""" + self.unchanged(s) + + # These should result in traceback-assignment + + def test_tb_1(self): + b = """def foo(): + g.throw(Exception, 5, 6)""" + a = """def foo(): + g.throw(Exception(5).with_traceback(6))""" + self.check(b, a) + + def test_tb_2(self): + b = """def foo(): + a = 5 + g.throw(Exception, 5, 6) + b = 6""" + a = """def foo(): + a = 5 + g.throw(Exception(5).with_traceback(6)) + b = 6""" + self.check(b, a) + + def test_tb_3(self): + b = """def foo(): + g.throw(Exception,5,6)""" + a = """def foo(): + g.throw(Exception(5).with_traceback(6))""" + self.check(b, a) + + def test_tb_4(self): + b = """def foo(): + a = 5 + g.throw(Exception,5,6) + b = 6""" + a = """def foo(): + a = 5 + g.throw(Exception(5).with_traceback(6)) + b = 6""" + self.check(b, a) + + def test_tb_5(self): + b = """def foo(): + g.throw(Exception, (5, 6, 7), 6)""" + a = """def foo(): + g.throw(Exception(5, 6, 7).with_traceback(6))""" + self.check(b, a) + + def 
test_tb_6(self): + b = """def foo(): + a = 5 + g.throw(Exception, (5, 6, 7), 6) + b = 6""" + a = """def foo(): + a = 5 + g.throw(Exception(5, 6, 7).with_traceback(6)) + b = 6""" + self.check(b, a) + + def test_tb_7(self): + b = """def foo(): + a + g.throw(Exception, 5, 6)""" + a = """def foo(): + a + g.throw(Exception(5).with_traceback(6))""" + self.check(b, a) + + def test_tb_8(self): + b = """def foo(): + a = 5 + a + g.throw(Exception, 5, 6) + b = 6""" + a = """def foo(): + a = 5 + a + g.throw(Exception(5).with_traceback(6)) + b = 6""" + self.check(b, a) + +class Test_long(FixerTestCase): + fixer = "long" + + def test_1(self): + b = """x = long(x)""" + a = """x = int(x)""" + self.check(b, a) + + def test_2(self): + b = """y = isinstance(x, long)""" + a = """y = isinstance(x, int)""" + self.check(b, a) + + def test_3(self): + b = """z = type(x) in (int, long)""" + a = """z = type(x) in (int, int)""" + self.check(b, a) + + def test_unchanged(self): + s = """long = True""" + self.unchanged(s) + + s = """s.long = True""" + self.unchanged(s) + + s = """def long(): pass""" + self.unchanged(s) + + s = """class long(): pass""" + self.unchanged(s) + + s = """def f(long): pass""" + self.unchanged(s) + + s = """def f(g, long): pass""" + self.unchanged(s) + + s = """def f(x, long=True): pass""" + self.unchanged(s) + + def test_prefix_preservation(self): + b = """x = long( x )""" + a = """x = int( x )""" + self.check(b, a) + + +class Test_execfile(FixerTestCase): + fixer = "execfile" + + def test_conversion(self): + b = """execfile("fn")""" + a = """exec(compile(open("fn").read(), "fn", 'exec'))""" + self.check(b, a) + + b = """execfile("fn", glob)""" + a = """exec(compile(open("fn").read(), "fn", 'exec'), glob)""" + self.check(b, a) + + b = """execfile("fn", glob, loc)""" + a = """exec(compile(open("fn").read(), "fn", 'exec'), glob, loc)""" + self.check(b, a) + + b = """execfile("fn", globals=glob)""" + a = """exec(compile(open("fn").read(), "fn", 'exec'), globals=glob)""" + self.check(b, a) + + b = """execfile("fn", locals=loc)""" + a = """exec(compile(open("fn").read(), "fn", 'exec'), locals=loc)""" + self.check(b, a) + + b = """execfile("fn", globals=glob, locals=loc)""" + a = """exec(compile(open("fn").read(), "fn", 'exec'), globals=glob, locals=loc)""" + self.check(b, a) + + def test_spacing(self): + b = """execfile( "fn" )""" + a = """exec(compile(open( "fn" ).read(), "fn", 'exec'))""" + self.check(b, a) + + b = """execfile("fn", globals = glob)""" + a = """exec(compile(open("fn").read(), "fn", 'exec'), globals = glob)""" + self.check(b, a) + + +class Test_isinstance(FixerTestCase): + fixer = "isinstance" + + def test_remove_multiple_items(self): + b = """isinstance(x, (int, int, int))""" + a = """isinstance(x, int)""" + self.check(b, a) + + b = """isinstance(x, (int, float, int, int, float))""" + a = """isinstance(x, (int, float))""" + self.check(b, a) + + b = """isinstance(x, (int, float, int, int, float, str))""" + a = """isinstance(x, (int, float, str))""" + self.check(b, a) + + b = """isinstance(foo() + bar(), (x(), y(), x(), int, int))""" + a = """isinstance(foo() + bar(), (x(), y(), x(), int))""" + self.check(b, a) + + def test_prefix_preservation(self): + b = """if isinstance( foo(), ( bar, bar, baz )) : pass""" + a = """if isinstance( foo(), ( bar, baz )) : pass""" + self.check(b, a) + + def test_unchanged(self): + self.unchanged("isinstance(x, (str, int))") + +class Test_dict(FixerTestCase): + fixer = "dict" + + def test_prefix_preservation(self): + b = "if d. 
keys ( ) : pass" + a = "if list(d. keys ( )) : pass" + self.check(b, a) + + b = "if d. items ( ) : pass" + a = "if list(d. items ( )) : pass" + self.check(b, a) + + b = "if d. iterkeys ( ) : pass" + a = "if iter(d. keys ( )) : pass" + self.check(b, a) + + b = "[i for i in d. iterkeys( ) ]" + a = "[i for i in d. keys( ) ]" + self.check(b, a) + + b = "if d. viewkeys ( ) : pass" + a = "if d. keys ( ) : pass" + self.check(b, a) + + b = "[i for i in d. viewkeys( ) ]" + a = "[i for i in d. keys( ) ]" + self.check(b, a) + + def test_trailing_comment(self): + b = "d.keys() # foo" + a = "list(d.keys()) # foo" + self.check(b, a) + + b = "d.items() # foo" + a = "list(d.items()) # foo" + self.check(b, a) + + b = "d.iterkeys() # foo" + a = "iter(d.keys()) # foo" + self.check(b, a) + + b = """[i for i in d.iterkeys() # foo + ]""" + a = """[i for i in d.keys() # foo + ]""" + self.check(b, a) + + b = """[i for i in d.iterkeys() # foo + ]""" + a = """[i for i in d.keys() # foo + ]""" + self.check(b, a) + + b = "d.viewitems() # foo" + a = "d.items() # foo" + self.check(b, a) + + def test_unchanged(self): + for wrapper in fixer_util.consuming_calls: + s = "s = %s(d.keys())" % wrapper + self.unchanged(s) + + s = "s = %s(d.values())" % wrapper + self.unchanged(s) + + s = "s = %s(d.items())" % wrapper + self.unchanged(s) + + def test_01(self): + b = "d.keys()" + a = "list(d.keys())" + self.check(b, a) + + b = "a[0].foo().keys()" + a = "list(a[0].foo().keys())" + self.check(b, a) + + def test_02(self): + b = "d.items()" + a = "list(d.items())" + self.check(b, a) + + def test_03(self): + b = "d.values()" + a = "list(d.values())" + self.check(b, a) + + def test_04(self): + b = "d.iterkeys()" + a = "iter(d.keys())" + self.check(b, a) + + def test_05(self): + b = "d.iteritems()" + a = "iter(d.items())" + self.check(b, a) + + def test_06(self): + b = "d.itervalues()" + a = "iter(d.values())" + self.check(b, a) + + def test_07(self): + s = "list(d.keys())" + self.unchanged(s) + + def test_08(self): + s = "sorted(d.keys())" + self.unchanged(s) + + def test_09(self): + b = "iter(d.keys())" + a = "iter(list(d.keys()))" + self.check(b, a) + + def test_10(self): + b = "foo(d.keys())" + a = "foo(list(d.keys()))" + self.check(b, a) + + def test_11(self): + b = "for i in d.keys(): print i" + a = "for i in list(d.keys()): print i" + self.check(b, a) + + def test_12(self): + b = "for i in d.iterkeys(): print i" + a = "for i in d.keys(): print i" + self.check(b, a) + + def test_13(self): + b = "[i for i in d.keys()]" + a = "[i for i in list(d.keys())]" + self.check(b, a) + + def test_14(self): + b = "[i for i in d.iterkeys()]" + a = "[i for i in d.keys()]" + self.check(b, a) + + def test_15(self): + b = "(i for i in d.keys())" + a = "(i for i in list(d.keys()))" + self.check(b, a) + + def test_16(self): + b = "(i for i in d.iterkeys())" + a = "(i for i in d.keys())" + self.check(b, a) + + def test_17(self): + b = "iter(d.iterkeys())" + a = "iter(d.keys())" + self.check(b, a) + + def test_18(self): + b = "list(d.iterkeys())" + a = "list(d.keys())" + self.check(b, a) + + def test_19(self): + b = "sorted(d.iterkeys())" + a = "sorted(d.keys())" + self.check(b, a) + + def test_20(self): + b = "foo(d.iterkeys())" + a = "foo(iter(d.keys()))" + self.check(b, a) + + def test_21(self): + b = "print h.iterkeys().next()" + a = "print iter(h.keys()).next()" + self.check(b, a) + + def test_22(self): + b = "print h.keys()[0]" + a = "print list(h.keys())[0]" + self.check(b, a) + + def test_23(self): + b = "print list(h.iterkeys().next())" + a = 
"print list(iter(h.keys()).next())" + self.check(b, a) + + def test_24(self): + b = "for x in h.keys()[0]: print x" + a = "for x in list(h.keys())[0]: print x" + self.check(b, a) + + def test_25(self): + b = "d.viewkeys()" + a = "d.keys()" + self.check(b, a) + + def test_26(self): + b = "d.viewitems()" + a = "d.items()" + self.check(b, a) + + def test_27(self): + b = "d.viewvalues()" + a = "d.values()" + self.check(b, a) + + def test_14(self): + b = "[i for i in d.viewkeys()]" + a = "[i for i in d.keys()]" + self.check(b, a) + + def test_15(self): + b = "(i for i in d.viewkeys())" + a = "(i for i in d.keys())" + self.check(b, a) + + def test_17(self): + b = "iter(d.viewkeys())" + a = "iter(d.keys())" + self.check(b, a) + + def test_18(self): + b = "list(d.viewkeys())" + a = "list(d.keys())" + self.check(b, a) + + def test_19(self): + b = "sorted(d.viewkeys())" + a = "sorted(d.keys())" + self.check(b, a) + +class Test_xrange(FixerTestCase): + fixer = "xrange" + + def test_prefix_preservation(self): + b = """x = xrange( 10 )""" + a = """x = range( 10 )""" + self.check(b, a) + + b = """x = xrange( 1 , 10 )""" + a = """x = range( 1 , 10 )""" + self.check(b, a) + + b = """x = xrange( 0 , 10 , 2 )""" + a = """x = range( 0 , 10 , 2 )""" + self.check(b, a) + + def test_single_arg(self): + b = """x = xrange(10)""" + a = """x = range(10)""" + self.check(b, a) + + def test_two_args(self): + b = """x = xrange(1, 10)""" + a = """x = range(1, 10)""" + self.check(b, a) + + def test_three_args(self): + b = """x = xrange(0, 10, 2)""" + a = """x = range(0, 10, 2)""" + self.check(b, a) + + def test_wrap_in_list(self): + b = """x = range(10, 3, 9)""" + a = """x = list(range(10, 3, 9))""" + self.check(b, a) + + b = """x = foo(range(10, 3, 9))""" + a = """x = foo(list(range(10, 3, 9)))""" + self.check(b, a) + + b = """x = range(10, 3, 9) + [4]""" + a = """x = list(range(10, 3, 9)) + [4]""" + self.check(b, a) + + b = """x = range(10)[::-1]""" + a = """x = list(range(10))[::-1]""" + self.check(b, a) + + b = """x = range(10) [3]""" + a = """x = list(range(10)) [3]""" + self.check(b, a) + + def test_xrange_in_for(self): + b = """for i in xrange(10):\n j=i""" + a = """for i in range(10):\n j=i""" + self.check(b, a) + + b = """[i for i in xrange(10)]""" + a = """[i for i in range(10)]""" + self.check(b, a) + + def test_range_in_for(self): + self.unchanged("for i in range(10): pass") + self.unchanged("[i for i in range(10)]") + + def test_in_contains_test(self): + self.unchanged("x in range(10, 3, 9)") + + def test_in_consuming_context(self): + for call in fixer_util.consuming_calls: + self.unchanged("a = %s(range(10))" % call) + +class Test_xrange_with_reduce(FixerTestCase): + + def setUp(self): + super(Test_xrange_with_reduce, self).setUp(["xrange", "reduce"]) + + def test_double_transform(self): + b = """reduce(x, xrange(5))""" + a = """from functools import reduce +reduce(x, range(5))""" + self.check(b, a) + +class Test_raw_input(FixerTestCase): + fixer = "raw_input" + + def test_prefix_preservation(self): + b = """x = raw_input( )""" + a = """x = input( )""" + self.check(b, a) + + b = """x = raw_input( '' )""" + a = """x = input( '' )""" + self.check(b, a) + + def test_1(self): + b = """x = raw_input()""" + a = """x = input()""" + self.check(b, a) + + def test_2(self): + b = """x = raw_input('')""" + a = """x = input('')""" + self.check(b, a) + + def test_3(self): + b = """x = raw_input('prompt')""" + a = """x = input('prompt')""" + self.check(b, a) + + def test_4(self): + b = """x = raw_input(foo(a) + 6)""" + a 
= """x = input(foo(a) + 6)""" + self.check(b, a) + + def test_5(self): + b = """x = raw_input(invite).split()""" + a = """x = input(invite).split()""" + self.check(b, a) + + def test_6(self): + b = """x = raw_input(invite) . split ()""" + a = """x = input(invite) . split ()""" + self.check(b, a) + + def test_8(self): + b = "x = int(raw_input())" + a = "x = int(input())" + self.check(b, a) + +class Test_funcattrs(FixerTestCase): + fixer = "funcattrs" + + attrs = ["closure", "doc", "name", "defaults", "code", "globals", "dict"] + + def test(self): + for attr in self.attrs: + b = "a.func_%s" % attr + a = "a.__%s__" % attr + self.check(b, a) + + b = "self.foo.func_%s.foo_bar" % attr + a = "self.foo.__%s__.foo_bar" % attr + self.check(b, a) + + def test_unchanged(self): + for attr in self.attrs: + s = "foo(func_%s + 5)" % attr + self.unchanged(s) + + s = "f(foo.__%s__)" % attr + self.unchanged(s) + + s = "f(foo.__%s__.foo)" % attr + self.unchanged(s) + +class Test_xreadlines(FixerTestCase): + fixer = "xreadlines" + + def test_call(self): + b = "for x in f.xreadlines(): pass" + a = "for x in f: pass" + self.check(b, a) + + b = "for x in foo().xreadlines(): pass" + a = "for x in foo(): pass" + self.check(b, a) + + b = "for x in (5 + foo()).xreadlines(): pass" + a = "for x in (5 + foo()): pass" + self.check(b, a) + + def test_attr_ref(self): + b = "foo(f.xreadlines + 5)" + a = "foo(f.__iter__ + 5)" + self.check(b, a) + + b = "foo(f().xreadlines + 5)" + a = "foo(f().__iter__ + 5)" + self.check(b, a) + + b = "foo((5 + f()).xreadlines + 5)" + a = "foo((5 + f()).__iter__ + 5)" + self.check(b, a) + + def test_unchanged(self): + s = "for x in f.xreadlines(5): pass" + self.unchanged(s) + + s = "for x in f.xreadlines(k=5): pass" + self.unchanged(s) + + s = "for x in f.xreadlines(*k, **v): pass" + self.unchanged(s) + + s = "foo(xreadlines)" + self.unchanged(s) + + +class ImportsFixerTests: + + def test_import_module(self): + for old, new in list(self.modules.items()): + b = "import %s" % old + a = "import %s" % new + self.check(b, a) + + b = "import foo, %s, bar" % old + a = "import foo, %s, bar" % new + self.check(b, a) + + def test_import_from(self): + for old, new in list(self.modules.items()): + b = "from %s import foo" % old + a = "from %s import foo" % new + self.check(b, a) + + b = "from %s import foo, bar" % old + a = "from %s import foo, bar" % new + self.check(b, a) + + b = "from %s import (yes, no)" % old + a = "from %s import (yes, no)" % new + self.check(b, a) + + def test_import_module_as(self): + for old, new in list(self.modules.items()): + b = "import %s as foo_bar" % old + a = "import %s as foo_bar" % new + self.check(b, a) + + b = "import %s as foo_bar" % old + a = "import %s as foo_bar" % new + self.check(b, a) + + def test_import_from_as(self): + for old, new in list(self.modules.items()): + b = "from %s import foo as bar" % old + a = "from %s import foo as bar" % new + self.check(b, a) + + def test_star(self): + for old, new in list(self.modules.items()): + b = "from %s import *" % old + a = "from %s import *" % new + self.check(b, a) + + def test_import_module_usage(self): + for old, new in list(self.modules.items()): + b = """ + import %s + foo(%s.bar) + """ % (old, old) + a = """ + import %s + foo(%s.bar) + """ % (new, new) + self.check(b, a) + + b = """ + from %s import x + %s = 23 + """ % (old, old) + a = """ + from %s import x + %s = 23 + """ % (new, old) + self.check(b, a) + + s = """ + def f(): + %s.method() + """ % (old,) + self.unchanged(s) + + # test nested usage + b = """ 
+ import %s + %s.bar(%s.foo) + """ % (old, old, old) + a = """ + import %s + %s.bar(%s.foo) + """ % (new, new, new) + self.check(b, a) + + b = """ + import %s + x.%s + """ % (old, old) + a = """ + import %s + x.%s + """ % (new, old) + self.check(b, a) + + +class Test_imports(FixerTestCase, ImportsFixerTests): + fixer = "imports" + from ..fixes.fix_imports import MAPPING as modules + + def test_multiple_imports(self): + b = """import urlparse, cStringIO""" + a = """import urllib.parse, io""" + self.check(b, a) + + def test_multiple_imports_as(self): + b = """ + import copy_reg as bar, HTMLParser as foo, urlparse + s = urlparse.spam(bar.foo()) + """ + a = """ + import copyreg as bar, html.parser as foo, urllib.parse + s = urllib.parse.spam(bar.foo()) + """ + self.check(b, a) + + +class Test_imports2(FixerTestCase, ImportsFixerTests): + fixer = "imports2" + from ..fixes.fix_imports2 import MAPPING as modules + + +class Test_imports_fixer_order(FixerTestCase, ImportsFixerTests): + + def setUp(self): + super(Test_imports_fixer_order, self).setUp(['imports', 'imports2']) + from ..fixes.fix_imports2 import MAPPING as mapping2 + self.modules = mapping2.copy() + from ..fixes.fix_imports import MAPPING as mapping1 + for key in ('dbhash', 'dumbdbm', 'dbm', 'gdbm'): + self.modules[key] = mapping1[key] + + def test_after_local_imports_refactoring(self): + for fix in ("imports", "imports2"): + self.fixer = fix + self.assert_runs_after("import") + + +class Test_urllib(FixerTestCase): + fixer = "urllib" + from ..fixes.fix_urllib import MAPPING as modules + + def test_import_module(self): + for old, changes in list(self.modules.items()): + b = "import %s" % old + a = "import %s" % ", ".join(map(itemgetter(0), changes)) + self.check(b, a) + + def test_import_from(self): + for old, changes in list(self.modules.items()): + all_members = [] + for new, members in changes: + for member in members: + all_members.append(member) + b = "from %s import %s" % (old, member) + a = "from %s import %s" % (new, member) + self.check(b, a) + + s = "from foo import %s" % member + self.unchanged(s) + + b = "from %s import %s" % (old, ", ".join(members)) + a = "from %s import %s" % (new, ", ".join(members)) + self.check(b, a) + + s = "from foo import %s" % ", ".join(members) + self.unchanged(s) + + # test the breaking of a module into multiple replacements + b = "from %s import %s" % (old, ", ".join(all_members)) + a = "\n".join(["from %s import %s" % (new, ", ".join(members)) + for (new, members) in changes]) + self.check(b, a) + + def test_import_module_as(self): + for old in self.modules: + s = "import %s as foo" % old + self.warns_unchanged(s, "This module is now multiple modules") + + def test_import_from_as(self): + for old, changes in list(self.modules.items()): + for new, members in changes: + for member in members: + b = "from %s import %s as foo_bar" % (old, member) + a = "from %s import %s as foo_bar" % (new, member) + self.check(b, a) + b = "from %s import %s as blah, %s" % (old, member, member) + a = "from %s import %s as blah, %s" % (new, member, member) + self.check(b, a) + + def test_star(self): + for old in self.modules: + s = "from %s import *" % old + self.warns_unchanged(s, "Cannot handle star imports") + + def test_indented(self): + b = """ +def foo(): + from urllib import urlencode, urlopen +""" + a = """ +def foo(): + from urllib.parse import urlencode + from urllib.request import urlopen +""" + self.check(b, a) + + b = """ +def foo(): + other() + from urllib import urlencode, urlopen +""" + a = """ +def 
foo(): + other() + from urllib.parse import urlencode + from urllib.request import urlopen +""" + self.check(b, a) + + + + def test_import_module_usage(self): + for old, changes in list(self.modules.items()): + for new, members in changes: + for member in members: + new_import = ", ".join([n for (n, mems) + in self.modules[old]]) + b = """ + import %s + foo(%s.%s) + """ % (old, old, member) + a = """ + import %s + foo(%s.%s) + """ % (new_import, new, member) + self.check(b, a) + b = """ + import %s + %s.%s(%s.%s) + """ % (old, old, member, old, member) + a = """ + import %s + %s.%s(%s.%s) + """ % (new_import, new, member, new, member) + self.check(b, a) + + +class Test_input(FixerTestCase): + fixer = "input" + + def test_prefix_preservation(self): + b = """x = input( )""" + a = """x = eval(input( ))""" + self.check(b, a) + + b = """x = input( '' )""" + a = """x = eval(input( '' ))""" + self.check(b, a) + + def test_trailing_comment(self): + b = """x = input() # foo""" + a = """x = eval(input()) # foo""" + self.check(b, a) + + def test_idempotency(self): + s = """x = eval(input())""" + self.unchanged(s) + + s = """x = eval(input(''))""" + self.unchanged(s) + + s = """x = eval(input(foo(5) + 9))""" + self.unchanged(s) + + def test_1(self): + b = """x = input()""" + a = """x = eval(input())""" + self.check(b, a) + + def test_2(self): + b = """x = input('')""" + a = """x = eval(input(''))""" + self.check(b, a) + + def test_3(self): + b = """x = input('prompt')""" + a = """x = eval(input('prompt'))""" + self.check(b, a) + + def test_4(self): + b = """x = input(foo(5) + 9)""" + a = """x = eval(input(foo(5) + 9))""" + self.check(b, a) + +class Test_tuple_params(FixerTestCase): + fixer = "tuple_params" + + def test_unchanged_1(self): + s = """def foo(): pass""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """def foo(a, b, c): pass""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """def foo(a=3, b=4, c=5): pass""" + self.unchanged(s) + + def test_1(self): + b = """ + def foo(((a, b), c)): + x = 5""" + + a = """ + def foo(xxx_todo_changeme): + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_2(self): + b = """ + def foo(((a, b), c), d): + x = 5""" + + a = """ + def foo(xxx_todo_changeme, d): + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_3(self): + b = """ + def foo(((a, b), c), d) -> e: + x = 5""" + + a = """ + def foo(xxx_todo_changeme, d) -> e: + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_semicolon(self): + b = """ + def foo(((a, b), c)): x = 5; y = 7""" + + a = """ + def foo(xxx_todo_changeme): ((a, b), c) = xxx_todo_changeme; x = 5; y = 7""" + self.check(b, a) + + def test_keywords(self): + b = """ + def foo(((a, b), c), d, e=5) -> z: + x = 5""" + + a = """ + def foo(xxx_todo_changeme, d, e=5) -> z: + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_varargs(self): + b = """ + def foo(((a, b), c), d, *vargs, **kwargs) -> z: + x = 5""" + + a = """ + def foo(xxx_todo_changeme, d, *vargs, **kwargs) -> z: + ((a, b), c) = xxx_todo_changeme + x = 5""" + self.check(b, a) + + def test_multi_1(self): + b = """ + def foo(((a, b), c), (d, e, f)) -> z: + x = 5""" + + a = """ + def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z: + ((a, b), c) = xxx_todo_changeme + (d, e, f) = xxx_todo_changeme1 + x = 5""" + self.check(b, a) + + def test_multi_2(self): + b = """ + def foo(x, ((a, b), c), d, (e, f, g), y) -> z: + x = 5""" + + a = """ + def foo(x, xxx_todo_changeme, d, 
xxx_todo_changeme1, y) -> z: + ((a, b), c) = xxx_todo_changeme + (e, f, g) = xxx_todo_changeme1 + x = 5""" + self.check(b, a) + + def test_docstring(self): + b = """ + def foo(((a, b), c), (d, e, f)) -> z: + "foo foo foo foo" + x = 5""" + + a = """ + def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z: + "foo foo foo foo" + ((a, b), c) = xxx_todo_changeme + (d, e, f) = xxx_todo_changeme1 + x = 5""" + self.check(b, a) + + def test_lambda_no_change(self): + s = """lambda x: x + 5""" + self.unchanged(s) + + def test_lambda_parens_single_arg(self): + b = """lambda (x): x + 5""" + a = """lambda x: x + 5""" + self.check(b, a) + + b = """lambda(x): x + 5""" + a = """lambda x: x + 5""" + self.check(b, a) + + b = """lambda ((((x)))): x + 5""" + a = """lambda x: x + 5""" + self.check(b, a) + + b = """lambda((((x)))): x + 5""" + a = """lambda x: x + 5""" + self.check(b, a) + + def test_lambda_simple(self): + b = """lambda (x, y): x + f(y)""" + a = """lambda x_y: x_y[0] + f(x_y[1])""" + self.check(b, a) + + b = """lambda(x, y): x + f(y)""" + a = """lambda x_y: x_y[0] + f(x_y[1])""" + self.check(b, a) + + b = """lambda (((x, y))): x + f(y)""" + a = """lambda x_y: x_y[0] + f(x_y[1])""" + self.check(b, a) + + b = """lambda(((x, y))): x + f(y)""" + a = """lambda x_y: x_y[0] + f(x_y[1])""" + self.check(b, a) + + def test_lambda_one_tuple(self): + b = """lambda (x,): x + f(x)""" + a = """lambda x1: x1[0] + f(x1[0])""" + self.check(b, a) + + b = """lambda (((x,))): x + f(x)""" + a = """lambda x1: x1[0] + f(x1[0])""" + self.check(b, a) + + def test_lambda_simple_multi_use(self): + b = """lambda (x, y): x + x + f(x) + x""" + a = """lambda x_y: x_y[0] + x_y[0] + f(x_y[0]) + x_y[0]""" + self.check(b, a) + + def test_lambda_simple_reverse(self): + b = """lambda (x, y): y + x""" + a = """lambda x_y: x_y[1] + x_y[0]""" + self.check(b, a) + + def test_lambda_nested(self): + b = """lambda (x, (y, z)): x + y + z""" + a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]""" + self.check(b, a) + + b = """lambda (((x, (y, z)))): x + y + z""" + a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]""" + self.check(b, a) + + def test_lambda_nested_multi_use(self): + b = """lambda (x, (y, z)): x + y + f(y)""" + a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + f(x_y_z[1][0])""" + self.check(b, a) + +class Test_methodattrs(FixerTestCase): + fixer = "methodattrs" + + attrs = ["func", "self", "class"] + + def test(self): + for attr in self.attrs: + b = "a.im_%s" % attr + if attr == "class": + a = "a.__self__.__class__" + else: + a = "a.__%s__" % attr + self.check(b, a) + + b = "self.foo.im_%s.foo_bar" % attr + if attr == "class": + a = "self.foo.__self__.__class__.foo_bar" + else: + a = "self.foo.__%s__.foo_bar" % attr + self.check(b, a) + + def test_unchanged(self): + for attr in self.attrs: + s = "foo(im_%s + 5)" % attr + self.unchanged(s) + + s = "f(foo.__%s__)" % attr + self.unchanged(s) + + s = "f(foo.__%s__.foo)" % attr + self.unchanged(s) + +class Test_next(FixerTestCase): + fixer = "next" + + def test_1(self): + b = """it.next()""" + a = """next(it)""" + self.check(b, a) + + def test_2(self): + b = """a.b.c.d.next()""" + a = """next(a.b.c.d)""" + self.check(b, a) + + def test_3(self): + b = """(a + b).next()""" + a = """next((a + b))""" + self.check(b, a) + + def test_4(self): + b = """a().next()""" + a = """next(a())""" + self.check(b, a) + + def test_5(self): + b = """a().next() + b""" + a = """next(a()) + b""" + self.check(b, a) + + def test_6(self): + b = """c( a().next() + b)""" + a = """c( next(a()) + b)""" + 
self.check(b, a) + + def test_prefix_preservation_1(self): + b = """ + for a in b: + foo(a) + a.next() + """ + a = """ + for a in b: + foo(a) + next(a) + """ + self.check(b, a) + + def test_prefix_preservation_2(self): + b = """ + for a in b: + foo(a) # abc + # def + a.next() + """ + a = """ + for a in b: + foo(a) # abc + # def + next(a) + """ + self.check(b, a) + + def test_prefix_preservation_3(self): + b = """ + next = 5 + for a in b: + foo(a) + a.next() + """ + a = """ + next = 5 + for a in b: + foo(a) + a.__next__() + """ + self.check(b, a, ignore_warnings=True) + + def test_prefix_preservation_4(self): + b = """ + next = 5 + for a in b: + foo(a) # abc + # def + a.next() + """ + a = """ + next = 5 + for a in b: + foo(a) # abc + # def + a.__next__() + """ + self.check(b, a, ignore_warnings=True) + + def test_prefix_preservation_5(self): + b = """ + next = 5 + for a in b: + foo(foo(a), # abc + a.next()) + """ + a = """ + next = 5 + for a in b: + foo(foo(a), # abc + a.__next__()) + """ + self.check(b, a, ignore_warnings=True) + + def test_prefix_preservation_6(self): + b = """ + for a in b: + foo(foo(a), # abc + a.next()) + """ + a = """ + for a in b: + foo(foo(a), # abc + next(a)) + """ + self.check(b, a) + + def test_method_1(self): + b = """ + class A: + def next(self): + pass + """ + a = """ + class A: + def __next__(self): + pass + """ + self.check(b, a) + + def test_method_2(self): + b = """ + class A(object): + def next(self): + pass + """ + a = """ + class A(object): + def __next__(self): + pass + """ + self.check(b, a) + + def test_method_3(self): + b = """ + class A: + def next(x): + pass + """ + a = """ + class A: + def __next__(x): + pass + """ + self.check(b, a) + + def test_method_4(self): + b = """ + class A: + def __init__(self, foo): + self.foo = foo + + def next(self): + pass + + def __iter__(self): + return self + """ + a = """ + class A: + def __init__(self, foo): + self.foo = foo + + def __next__(self): + pass + + def __iter__(self): + return self + """ + self.check(b, a) + + def test_method_unchanged(self): + s = """ + class A: + def next(self, a, b): + pass + """ + self.unchanged(s) + + def test_shadowing_assign_simple(self): + s = """ + next = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_assign_tuple_1(self): + s = """ + (next, a) = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_assign_tuple_2(self): + s = """ + (a, (b, (next, c)), a) = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_assign_list_1(self): + s = """ + [next, a] = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_assign_list_2(self): + s = """ + [a, [b, [next, c]], a] = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_builtin_assign(self): + s = """ + def foo(): + __builtin__.next = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_builtin_assign_in_tuple(self): + s = """ + def foo(): + (a, __builtin__.next) = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") 
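# A minimal sketch, assuming lib2to3 is importable as laid out in this commit,
# of driving the "next" fixer that the tests above exercise through the public
# refactoring API; the fixer module name and the sample source below are only
# illustrative and are not taken from the diff.
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_next"])
# refactor_string() expects source text ending in a newline and returns a
# parse tree; str() of the tree is the rewritten source.
tree = tool.refactor_string("it.next()\n", "<example>")
print(str(tree))    # -> next(it)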
+ + def test_builtin_assign_in_list(self): + s = """ + def foo(): + [a, __builtin__.next] = foo + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_assign_to_next(self): + s = """ + def foo(): + A.next = foo + + class A: + def next(self, a, b): + pass + """ + self.unchanged(s) + + def test_assign_to_next_in_tuple(self): + s = """ + def foo(): + (a, A.next) = foo + + class A: + def next(self, a, b): + pass + """ + self.unchanged(s) + + def test_assign_to_next_in_list(self): + s = """ + def foo(): + [a, A.next] = foo + + class A: + def next(self, a, b): + pass + """ + self.unchanged(s) + + def test_shadowing_import_1(self): + s = """ + import foo.bar as next + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_2(self): + s = """ + import bar, bar.foo as next + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_3(self): + s = """ + import bar, bar.foo as next, baz + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_from_1(self): + s = """ + from x import next + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_from_2(self): + s = """ + from x.a import next + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_from_3(self): + s = """ + from x import a, next, b + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_import_from_4(self): + s = """ + from x.a import a, next, b + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_funcdef_1(self): + s = """ + def next(a): + pass + + class A: + def next(self, a, b): + pass + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_funcdef_2(self): + b = """ + def next(a): + pass + + class A: + def next(self): + pass + + it.next() + """ + a = """ + def next(a): + pass + + class A: + def __next__(self): + pass + + it.__next__() + """ + self.warns(b, a, "Calls to builtin next() possibly shadowed") + + def test_shadowing_global_1(self): + s = """ + def f(): + global next + next = 5 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_global_2(self): + s = """ + def f(): + global a, next, b + next = 5 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_for_simple(self): + s = """ + for next in it(): + pass + + b = 5 + c = 6 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_for_tuple_1(self): + s = """ + for next, b in it(): + pass + + b = 5 + c = 6 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_shadowing_for_tuple_2(self): + s = """ + for a, (next, c), b in it(): + pass + + b = 5 + c = 6 + """ + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") + + def test_noncall_access_1(self): + b = """gnext = g.next""" + a = """gnext = g.__next__""" + self.check(b, a) + + def 
test_noncall_access_2(self): + b = """f(g.next + 5)""" + a = """f(g.__next__ + 5)""" + self.check(b, a) + + def test_noncall_access_3(self): + b = """f(g().next + 5)""" + a = """f(g().__next__ + 5)""" + self.check(b, a) + +class Test_nonzero(FixerTestCase): + fixer = "nonzero" + + def test_1(self): + b = """ + class A: + def __nonzero__(self): + pass + """ + a = """ + class A: + def __bool__(self): + pass + """ + self.check(b, a) + + def test_2(self): + b = """ + class A(object): + def __nonzero__(self): + pass + """ + a = """ + class A(object): + def __bool__(self): + pass + """ + self.check(b, a) + + def test_unchanged_1(self): + s = """ + class A(object): + def __bool__(self): + pass + """ + self.unchanged(s) + + def test_unchanged_2(self): + s = """ + class A(object): + def __nonzero__(self, a): + pass + """ + self.unchanged(s) + + def test_unchanged_func(self): + s = """ + def __nonzero__(self): + pass + """ + self.unchanged(s) + +class Test_numliterals(FixerTestCase): + fixer = "numliterals" + + def test_octal_1(self): + b = """0755""" + a = """0o755""" + self.check(b, a) + + def test_long_int_1(self): + b = """a = 12L""" + a = """a = 12""" + self.check(b, a) + + def test_long_int_2(self): + b = """a = 12l""" + a = """a = 12""" + self.check(b, a) + + def test_long_hex(self): + b = """b = 0x12l""" + a = """b = 0x12""" + self.check(b, a) + + def test_comments_and_spacing(self): + b = """b = 0x12L""" + a = """b = 0x12""" + self.check(b, a) + + b = """b = 0755 # spam""" + a = """b = 0o755 # spam""" + self.check(b, a) + + def test_unchanged_int(self): + s = """5""" + self.unchanged(s) + + def test_unchanged_float(self): + s = """5.0""" + self.unchanged(s) + + def test_unchanged_octal(self): + s = """0o755""" + self.unchanged(s) + + def test_unchanged_hex(self): + s = """0xABC""" + self.unchanged(s) + + def test_unchanged_exp(self): + s = """5.0e10""" + self.unchanged(s) + + def test_unchanged_complex_int(self): + s = """5 + 4j""" + self.unchanged(s) + + def test_unchanged_complex_float(self): + s = """5.4 + 4.9j""" + self.unchanged(s) + + def test_unchanged_complex_bare(self): + s = """4j""" + self.unchanged(s) + s = """4.4j""" + self.unchanged(s) + +class Test_renames(FixerTestCase): + fixer = "renames" + + modules = {"sys": ("maxint", "maxsize"), + } + + def test_import_from(self): + for mod, (old, new) in list(self.modules.items()): + b = "from %s import %s" % (mod, old) + a = "from %s import %s" % (mod, new) + self.check(b, a) + + s = "from foo import %s" % old + self.unchanged(s) + + def test_import_from_as(self): + for mod, (old, new) in list(self.modules.items()): + b = "from %s import %s as foo_bar" % (mod, old) + a = "from %s import %s as foo_bar" % (mod, new) + self.check(b, a) + + def test_import_module_usage(self): + for mod, (old, new) in list(self.modules.items()): + b = """ + import %s + foo(%s, %s.%s) + """ % (mod, mod, mod, old) + a = """ + import %s + foo(%s, %s.%s) + """ % (mod, mod, mod, new) + self.check(b, a) + + def XXX_test_from_import_usage(self): + # not implemented yet + for mod, (old, new) in list(self.modules.items()): + b = """ + from %s import %s + foo(%s, %s) + """ % (mod, old, mod, old) + a = """ + from %s import %s + foo(%s, %s) + """ % (mod, new, mod, new) + self.check(b, a) + +class Test_unicode(FixerTestCase): + fixer = "unicode" + + def test_whitespace(self): + b = """unicode( x)""" + a = """str( x)""" + self.check(b, a) + + b = """ unicode(x )""" + a = """ str(x )""" + self.check(b, a) + + b = """ u'h'""" + a = """ 'h'""" + self.check(b, a) + + def 
test_unicode_call(self): + b = """unicode(x, y, z)""" + a = """str(x, y, z)""" + self.check(b, a) + + def test_unichr(self): + b = """unichr(u'h')""" + a = """chr('h')""" + self.check(b, a) + + def test_unicode_literal_1(self): + b = '''u"x"''' + a = '''"x"''' + self.check(b, a) + + def test_unicode_literal_2(self): + b = """ur'x'""" + a = """r'x'""" + self.check(b, a) + + def test_unicode_literal_3(self): + b = """UR'''x''' """ + a = """R'''x''' """ + self.check(b, a) + +class Test_callable(FixerTestCase): + fixer = "callable" + + def test_prefix_preservation(self): + b = """callable( x)""" + a = """import collections\nisinstance( x, collections.Callable)""" + self.check(b, a) + + b = """if callable(x): pass""" + a = """import collections +if isinstance(x, collections.Callable): pass""" + self.check(b, a) + + def test_callable_call(self): + b = """callable(x)""" + a = """import collections\nisinstance(x, collections.Callable)""" + self.check(b, a) + + def test_global_import(self): + b = """ +def spam(foo): + callable(foo)"""[1:] + a = """ +import collections +def spam(foo): + isinstance(foo, collections.Callable)"""[1:] + self.check(b, a) + + b = """ +import collections +def spam(foo): + callable(foo)"""[1:] + # same output if it was already imported + self.check(b, a) + + b = """ +from collections import * +def spam(foo): + callable(foo)"""[1:] + a = """ +from collections import * +import collections +def spam(foo): + isinstance(foo, collections.Callable)"""[1:] + self.check(b, a) + + b = """ +do_stuff() +do_some_other_stuff() +assert callable(do_stuff)"""[1:] + a = """ +import collections +do_stuff() +do_some_other_stuff() +assert isinstance(do_stuff, collections.Callable)"""[1:] + self.check(b, a) + + b = """ +if isinstance(do_stuff, Callable): + assert callable(do_stuff) + do_stuff(do_stuff) + if not callable(do_stuff): + exit(1) + else: + assert callable(do_stuff) +else: + assert not callable(do_stuff)"""[1:] + a = """ +import collections +if isinstance(do_stuff, Callable): + assert isinstance(do_stuff, collections.Callable) + do_stuff(do_stuff) + if not isinstance(do_stuff, collections.Callable): + exit(1) + else: + assert isinstance(do_stuff, collections.Callable) +else: + assert not isinstance(do_stuff, collections.Callable)"""[1:] + self.check(b, a) + + def test_callable_should_not_change(self): + a = """callable(*x)""" + self.unchanged(a) + + a = """callable(x, y)""" + self.unchanged(a) + + a = """callable(x, kw=y)""" + self.unchanged(a) + + a = """callable()""" + self.unchanged(a) + +class Test_filter(FixerTestCase): + fixer = "filter" + + def test_prefix_preservation(self): + b = """x = filter( foo, 'abc' )""" + a = """x = list(filter( foo, 'abc' ))""" + self.check(b, a) + + b = """x = filter( None , 'abc' )""" + a = """x = [_f for _f in 'abc' if _f]""" + self.check(b, a) + + def test_filter_basic(self): + b = """x = filter(None, 'abc')""" + a = """x = [_f for _f in 'abc' if _f]""" + self.check(b, a) + + b = """x = len(filter(f, 'abc'))""" + a = """x = len(list(filter(f, 'abc')))""" + self.check(b, a) + + b = """x = filter(lambda x: x%2 == 0, range(10))""" + a = """x = [x for x in range(10) if x%2 == 0]""" + self.check(b, a) + + # Note the parens around x + b = """x = filter(lambda (x): x%2 == 0, range(10))""" + a = """x = [x for x in range(10) if x%2 == 0]""" + self.check(b, a) + + # XXX This (rare) case is not supported +## b = """x = filter(f, 'abc')[0]""" +## a = """x = list(filter(f, 'abc'))[0]""" +## self.check(b, a) + + def test_filter_nochange(self): + a = 
"""b.join(filter(f, 'abc'))""" + self.unchanged(a) + a = """(a + foo(5)).join(filter(f, 'abc'))""" + self.unchanged(a) + a = """iter(filter(f, 'abc'))""" + self.unchanged(a) + a = """list(filter(f, 'abc'))""" + self.unchanged(a) + a = """list(filter(f, 'abc'))[0]""" + self.unchanged(a) + a = """set(filter(f, 'abc'))""" + self.unchanged(a) + a = """set(filter(f, 'abc')).pop()""" + self.unchanged(a) + a = """tuple(filter(f, 'abc'))""" + self.unchanged(a) + a = """any(filter(f, 'abc'))""" + self.unchanged(a) + a = """all(filter(f, 'abc'))""" + self.unchanged(a) + a = """sum(filter(f, 'abc'))""" + self.unchanged(a) + a = """sorted(filter(f, 'abc'))""" + self.unchanged(a) + a = """sorted(filter(f, 'abc'), key=blah)""" + self.unchanged(a) + a = """sorted(filter(f, 'abc'), key=blah)[0]""" + self.unchanged(a) + a = """for i in filter(f, 'abc'): pass""" + self.unchanged(a) + a = """[x for x in filter(f, 'abc')]""" + self.unchanged(a) + a = """(x for x in filter(f, 'abc'))""" + self.unchanged(a) + + def test_future_builtins(self): + a = "from future_builtins import spam, filter; filter(f, 'ham')" + self.unchanged(a) + + b = """from future_builtins import spam; x = filter(f, 'abc')""" + a = """from future_builtins import spam; x = list(filter(f, 'abc'))""" + self.check(b, a) + + a = "from future_builtins import *; filter(f, 'ham')" + self.unchanged(a) + +class Test_map(FixerTestCase): + fixer = "map" + + def check(self, b, a): + self.unchanged("from future_builtins import map; " + b, a) + super(Test_map, self).check(b, a) + + def test_prefix_preservation(self): + b = """x = map( f, 'abc' )""" + a = """x = list(map( f, 'abc' ))""" + self.check(b, a) + + def test_trailing_comment(self): + b = """x = map(f, 'abc') # foo""" + a = """x = list(map(f, 'abc')) # foo""" + self.check(b, a) + + def test_None_with_multiple_arguments(self): + s = """x = map(None, a, b, c)""" + self.warns_unchanged(s, "cannot convert map(None, ...) 
with " + "multiple arguments") + + def test_map_basic(self): + b = """x = map(f, 'abc')""" + a = """x = list(map(f, 'abc'))""" + self.check(b, a) + + b = """x = len(map(f, 'abc', 'def'))""" + a = """x = len(list(map(f, 'abc', 'def')))""" + self.check(b, a) + + b = """x = map(None, 'abc')""" + a = """x = list('abc')""" + self.check(b, a) + + b = """x = map(lambda x: x+1, range(4))""" + a = """x = [x+1 for x in range(4)]""" + self.check(b, a) + + # Note the parens around x + b = """x = map(lambda (x): x+1, range(4))""" + a = """x = [x+1 for x in range(4)]""" + self.check(b, a) + + b = """ + foo() + # foo + map(f, x) + """ + a = """ + foo() + # foo + list(map(f, x)) + """ + self.warns(b, a, "You should use a for loop here") + + # XXX This (rare) case is not supported +## b = """x = map(f, 'abc')[0]""" +## a = """x = list(map(f, 'abc'))[0]""" +## self.check(b, a) + + def test_map_nochange(self): + a = """b.join(map(f, 'abc'))""" + self.unchanged(a) + a = """(a + foo(5)).join(map(f, 'abc'))""" + self.unchanged(a) + a = """iter(map(f, 'abc'))""" + self.unchanged(a) + a = """list(map(f, 'abc'))""" + self.unchanged(a) + a = """list(map(f, 'abc'))[0]""" + self.unchanged(a) + a = """set(map(f, 'abc'))""" + self.unchanged(a) + a = """set(map(f, 'abc')).pop()""" + self.unchanged(a) + a = """tuple(map(f, 'abc'))""" + self.unchanged(a) + a = """any(map(f, 'abc'))""" + self.unchanged(a) + a = """all(map(f, 'abc'))""" + self.unchanged(a) + a = """sum(map(f, 'abc'))""" + self.unchanged(a) + a = """sorted(map(f, 'abc'))""" + self.unchanged(a) + a = """sorted(map(f, 'abc'), key=blah)""" + self.unchanged(a) + a = """sorted(map(f, 'abc'), key=blah)[0]""" + self.unchanged(a) + a = """for i in map(f, 'abc'): pass""" + self.unchanged(a) + a = """[x for x in map(f, 'abc')]""" + self.unchanged(a) + a = """(x for x in map(f, 'abc'))""" + self.unchanged(a) + + def test_future_builtins(self): + a = "from future_builtins import spam, map, eggs; map(f, 'ham')" + self.unchanged(a) + + b = """from future_builtins import spam, eggs; x = map(f, 'abc')""" + a = """from future_builtins import spam, eggs; x = list(map(f, 'abc'))""" + self.check(b, a) + + a = "from future_builtins import *; map(f, 'ham')" + self.unchanged(a) + +class Test_zip(FixerTestCase): + fixer = "zip" + + def check(self, b, a): + self.unchanged("from future_builtins import zip; " + b, a) + super(Test_zip, self).check(b, a) + + def test_zip_basic(self): + b = """x = zip(a, b, c)""" + a = """x = list(zip(a, b, c))""" + self.check(b, a) + + b = """x = len(zip(a, b))""" + a = """x = len(list(zip(a, b)))""" + self.check(b, a) + + def test_zip_nochange(self): + a = """b.join(zip(a, b))""" + self.unchanged(a) + a = """(a + foo(5)).join(zip(a, b))""" + self.unchanged(a) + a = """iter(zip(a, b))""" + self.unchanged(a) + a = """list(zip(a, b))""" + self.unchanged(a) + a = """list(zip(a, b))[0]""" + self.unchanged(a) + a = """set(zip(a, b))""" + self.unchanged(a) + a = """set(zip(a, b)).pop()""" + self.unchanged(a) + a = """tuple(zip(a, b))""" + self.unchanged(a) + a = """any(zip(a, b))""" + self.unchanged(a) + a = """all(zip(a, b))""" + self.unchanged(a) + a = """sum(zip(a, b))""" + self.unchanged(a) + a = """sorted(zip(a, b))""" + self.unchanged(a) + a = """sorted(zip(a, b), key=blah)""" + self.unchanged(a) + a = """sorted(zip(a, b), key=blah)[0]""" + self.unchanged(a) + a = """for i in zip(a, b): pass""" + self.unchanged(a) + a = """[x for x in zip(a, b)]""" + self.unchanged(a) + a = """(x for x in zip(a, b))""" + self.unchanged(a) + + def 
test_future_builtins(self): + a = "from future_builtins import spam, zip, eggs; zip(a, b)" + self.unchanged(a) + + b = """from future_builtins import spam, eggs; x = zip(a, b)""" + a = """from future_builtins import spam, eggs; x = list(zip(a, b))""" + self.check(b, a) + + a = "from future_builtins import *; zip(a, b)" + self.unchanged(a) + +class Test_standarderror(FixerTestCase): + fixer = "standarderror" + + def test(self): + b = """x = StandardError()""" + a = """x = Exception()""" + self.check(b, a) + + b = """x = StandardError(a, b, c)""" + a = """x = Exception(a, b, c)""" + self.check(b, a) + + b = """f(2 + StandardError(a, b, c))""" + a = """f(2 + Exception(a, b, c))""" + self.check(b, a) + +class Test_types(FixerTestCase): + fixer = "types" + + def test_basic_types_convert(self): + b = """types.StringType""" + a = """bytes""" + self.check(b, a) + + b = """types.DictType""" + a = """dict""" + self.check(b, a) + + b = """types . IntType""" + a = """int""" + self.check(b, a) + + b = """types.ListType""" + a = """list""" + self.check(b, a) + + b = """types.LongType""" + a = """int""" + self.check(b, a) + + b = """types.NoneType""" + a = """type(None)""" + self.check(b, a) + +class Test_idioms(FixerTestCase): + fixer = "idioms" + + def test_while(self): + b = """while 1: foo()""" + a = """while True: foo()""" + self.check(b, a) + + b = """while 1: foo()""" + a = """while True: foo()""" + self.check(b, a) + + b = """ + while 1: + foo() + """ + a = """ + while True: + foo() + """ + self.check(b, a) + + def test_while_unchanged(self): + s = """while 11: foo()""" + self.unchanged(s) + + s = """while 0: foo()""" + self.unchanged(s) + + s = """while foo(): foo()""" + self.unchanged(s) + + s = """while []: foo()""" + self.unchanged(s) + + def test_eq_simple(self): + b = """type(x) == T""" + a = """isinstance(x, T)""" + self.check(b, a) + + b = """if type(x) == T: pass""" + a = """if isinstance(x, T): pass""" + self.check(b, a) + + def test_eq_reverse(self): + b = """T == type(x)""" + a = """isinstance(x, T)""" + self.check(b, a) + + b = """if T == type(x): pass""" + a = """if isinstance(x, T): pass""" + self.check(b, a) + + def test_eq_expression(self): + b = """type(x+y) == d.get('T')""" + a = """isinstance(x+y, d.get('T'))""" + self.check(b, a) + + b = """type( x + y) == d.get('T')""" + a = """isinstance(x + y, d.get('T'))""" + self.check(b, a) + + def test_is_simple(self): + b = """type(x) is T""" + a = """isinstance(x, T)""" + self.check(b, a) + + b = """if type(x) is T: pass""" + a = """if isinstance(x, T): pass""" + self.check(b, a) + + def test_is_reverse(self): + b = """T is type(x)""" + a = """isinstance(x, T)""" + self.check(b, a) + + b = """if T is type(x): pass""" + a = """if isinstance(x, T): pass""" + self.check(b, a) + + def test_is_expression(self): + b = """type(x+y) is d.get('T')""" + a = """isinstance(x+y, d.get('T'))""" + self.check(b, a) + + b = """type( x + y) is d.get('T')""" + a = """isinstance(x + y, d.get('T'))""" + self.check(b, a) + + def test_is_not_simple(self): + b = """type(x) is not T""" + a = """not isinstance(x, T)""" + self.check(b, a) + + b = """if type(x) is not T: pass""" + a = """if not isinstance(x, T): pass""" + self.check(b, a) + + def test_is_not_reverse(self): + b = """T is not type(x)""" + a = """not isinstance(x, T)""" + self.check(b, a) + + b = """if T is not type(x): pass""" + a = """if not isinstance(x, T): pass""" + self.check(b, a) + + def test_is_not_expression(self): + b = """type(x+y) is not d.get('T')""" + a = """not isinstance(x+y, 
d.get('T'))""" + self.check(b, a) + + b = """type( x + y) is not d.get('T')""" + a = """not isinstance(x + y, d.get('T'))""" + self.check(b, a) + + def test_ne_simple(self): + b = """type(x) != T""" + a = """not isinstance(x, T)""" + self.check(b, a) + + b = """if type(x) != T: pass""" + a = """if not isinstance(x, T): pass""" + self.check(b, a) + + def test_ne_reverse(self): + b = """T != type(x)""" + a = """not isinstance(x, T)""" + self.check(b, a) + + b = """if T != type(x): pass""" + a = """if not isinstance(x, T): pass""" + self.check(b, a) + + def test_ne_expression(self): + b = """type(x+y) != d.get('T')""" + a = """not isinstance(x+y, d.get('T'))""" + self.check(b, a) + + b = """type( x + y) != d.get('T')""" + a = """not isinstance(x + y, d.get('T'))""" + self.check(b, a) + + def test_type_unchanged(self): + a = """type(x).__name__""" + self.unchanged(a) + + def test_sort_list_call(self): + b = """ + v = list(t) + v.sort() + foo(v) + """ + a = """ + v = sorted(t) + foo(v) + """ + self.check(b, a) + + b = """ + v = list(foo(b) + d) + v.sort() + foo(v) + """ + a = """ + v = sorted(foo(b) + d) + foo(v) + """ + self.check(b, a) + + b = """ + while x: + v = list(t) + v.sort() + foo(v) + """ + a = """ + while x: + v = sorted(t) + foo(v) + """ + self.check(b, a) + + b = """ + v = list(t) + # foo + v.sort() + foo(v) + """ + a = """ + v = sorted(t) + # foo + foo(v) + """ + self.check(b, a) + + b = r""" + v = list( t) + v.sort() + foo(v) + """ + a = r""" + v = sorted( t) + foo(v) + """ + self.check(b, a) + + b = r""" + try: + m = list(s) + m.sort() + except: pass + """ + + a = r""" + try: + m = sorted(s) + except: pass + """ + self.check(b, a) + + b = r""" + try: + m = list(s) + # foo + m.sort() + except: pass + """ + + a = r""" + try: + m = sorted(s) + # foo + except: pass + """ + self.check(b, a) + + b = r""" + m = list(s) + # more comments + m.sort()""" + + a = r""" + m = sorted(s) + # more comments""" + self.check(b, a) + + def test_sort_simple_expr(self): + b = """ + v = t + v.sort() + foo(v) + """ + a = """ + v = sorted(t) + foo(v) + """ + self.check(b, a) + + b = """ + v = foo(b) + v.sort() + foo(v) + """ + a = """ + v = sorted(foo(b)) + foo(v) + """ + self.check(b, a) + + b = """ + v = b.keys() + v.sort() + foo(v) + """ + a = """ + v = sorted(b.keys()) + foo(v) + """ + self.check(b, a) + + b = """ + v = foo(b) + d + v.sort() + foo(v) + """ + a = """ + v = sorted(foo(b) + d) + foo(v) + """ + self.check(b, a) + + b = """ + while x: + v = t + v.sort() + foo(v) + """ + a = """ + while x: + v = sorted(t) + foo(v) + """ + self.check(b, a) + + b = """ + v = t + # foo + v.sort() + foo(v) + """ + a = """ + v = sorted(t) + # foo + foo(v) + """ + self.check(b, a) + + b = r""" + v = t + v.sort() + foo(v) + """ + a = r""" + v = sorted(t) + foo(v) + """ + self.check(b, a) + + def test_sort_unchanged(self): + s = """ + v = list(t) + w.sort() + foo(w) + """ + self.unchanged(s) + + s = """ + v = list(t) + v.sort(u) + foo(v) + """ + self.unchanged(s) + +class Test_basestring(FixerTestCase): + fixer = "basestring" + + def test_basestring(self): + b = """isinstance(x, basestring)""" + a = """isinstance(x, str)""" + self.check(b, a) + +class Test_buffer(FixerTestCase): + fixer = "buffer" + + def test_buffer(self): + b = """x = buffer(y)""" + a = """x = memoryview(y)""" + self.check(b, a) + + def test_slicing(self): + b = """buffer(y)[4:5]""" + a = """memoryview(y)[4:5]""" + self.check(b, a) + +class Test_future(FixerTestCase): + fixer = "future" + + def test_future(self): + b = """from __future__ import 
braces""" + a = """""" + self.check(b, a) + + b = """# comment\nfrom __future__ import braces""" + a = """# comment\n""" + self.check(b, a) + + b = """from __future__ import braces\n# comment""" + a = """\n# comment""" + self.check(b, a) + + def test_run_order(self): + self.assert_runs_after('print') + +class Test_itertools(FixerTestCase): + fixer = "itertools" + + def checkall(self, before, after): + # Because we need to check with and without the itertools prefix + # and on each of the three functions, these loops make it all + # much easier + for i in ('itertools.', ''): + for f in ('map', 'filter', 'zip'): + b = before %(i+'i'+f) + a = after %(f) + self.check(b, a) + + def test_0(self): + # A simple example -- test_1 covers exactly the same thing, + # but it's not quite as clear. + b = "itertools.izip(a, b)" + a = "zip(a, b)" + self.check(b, a) + + def test_1(self): + b = """%s(f, a)""" + a = """%s(f, a)""" + self.checkall(b, a) + + def test_2(self): + b = """itertools.ifilterfalse(a, b)""" + a = """itertools.filterfalse(a, b)""" + self.check(b, a) + + def test_4(self): + b = """ifilterfalse(a, b)""" + a = """filterfalse(a, b)""" + self.check(b, a) + + def test_space_1(self): + b = """ %s(f, a)""" + a = """ %s(f, a)""" + self.checkall(b, a) + + def test_space_2(self): + b = """ itertools.ifilterfalse(a, b)""" + a = """ itertools.filterfalse(a, b)""" + self.check(b, a) + + def test_run_order(self): + self.assert_runs_after('map', 'zip', 'filter') + +class Test_itertools_imports(FixerTestCase): + fixer = 'itertools_imports' + + def test_reduced(self): + b = "from itertools import imap, izip, foo" + a = "from itertools import foo" + self.check(b, a) + + b = "from itertools import bar, imap, izip, foo" + a = "from itertools import bar, foo" + self.check(b, a) + + b = "from itertools import chain, imap, izip" + a = "from itertools import chain" + self.check(b, a) + + def test_comments(self): + b = "#foo\nfrom itertools import imap, izip" + a = "#foo\n" + self.check(b, a) + + def test_none(self): + b = "from itertools import imap, izip" + a = "" + self.check(b, a) + + b = "from itertools import izip" + a = "" + self.check(b, a) + + def test_import_as(self): + b = "from itertools import izip, bar as bang, imap" + a = "from itertools import bar as bang" + self.check(b, a) + + b = "from itertools import izip as _zip, imap, bar" + a = "from itertools import bar" + self.check(b, a) + + b = "from itertools import imap as _map" + a = "" + self.check(b, a) + + b = "from itertools import imap as _map, izip as _zip" + a = "" + self.check(b, a) + + s = "from itertools import bar as bang" + self.unchanged(s) + + def test_ifilter(self): + b = "from itertools import ifilterfalse" + a = "from itertools import filterfalse" + self.check(b, a) + + b = "from itertools import imap, ifilterfalse, foo" + a = "from itertools import filterfalse, foo" + self.check(b, a) + + b = "from itertools import bar, ifilterfalse, foo" + a = "from itertools import bar, filterfalse, foo" + self.check(b, a) + + def test_import_star(self): + s = "from itertools import *" + self.unchanged(s) + + + def test_unchanged(self): + s = "from itertools import foo" + self.unchanged(s) + + +class Test_import(FixerTestCase): + fixer = "import" + + def setUp(self): + super(Test_import, self).setUp() + # Need to replace fix_import's exists method + # so we can check that it's doing the right thing + self.files_checked = [] + self.present_files = set() + self.always_exists = True + def fake_exists(name): + self.files_checked.append(name) + 
return self.always_exists or (name in self.present_files) + + from lib2to3.fixes import fix_import + fix_import.exists = fake_exists + + def tearDown(self): + from lib2to3.fixes import fix_import + fix_import.exists = os.path.exists + + def check_both(self, b, a): + self.always_exists = True + super(Test_import, self).check(b, a) + self.always_exists = False + super(Test_import, self).unchanged(b) + + def test_files_checked(self): + def p(path): + # Takes a unix path and returns a path with correct separators + return os.path.pathsep.join(path.split("/")) + + self.always_exists = False + self.present_files = set(['__init__.py']) + expected_extensions = ('.py', os.path.sep, '.pyc', '.so', '.sl', '.pyd') + names_to_test = (p("/spam/eggs.py"), "ni.py", p("../../shrubbery.py")) + + for name in names_to_test: + self.files_checked = [] + self.filename = name + self.unchanged("import jam") + + if os.path.dirname(name): + name = os.path.dirname(name) + '/jam' + else: + name = 'jam' + expected_checks = set(name + ext for ext in expected_extensions) + expected_checks.add("__init__.py") + + self.assertEqual(set(self.files_checked), expected_checks) + + def test_not_in_package(self): + s = "import bar" + self.always_exists = False + self.present_files = set(["bar.py"]) + self.unchanged(s) + + def test_with_absolute_import_enabled(self): + s = "from __future__ import absolute_import\nimport bar" + self.always_exists = False + self.present_files = set(["__init__.py", "bar.py"]) + self.unchanged(s) + + def test_in_package(self): + b = "import bar" + a = "from . import bar" + self.always_exists = False + self.present_files = set(["__init__.py", "bar.py"]) + self.check(b, a) + + def test_import_from_package(self): + b = "import bar" + a = "from . import bar" + self.always_exists = False + self.present_files = set(["__init__.py", "bar" + os.path.sep]) + self.check(b, a) + + def test_already_relative_import(self): + s = "from . import bar" + self.unchanged(s) + + def test_comments_and_indent(self): + b = "import bar # Foo" + a = "from . import bar # Foo" + self.check(b, a) + + def test_from(self): + b = "from foo import bar, baz" + a = "from .foo import bar, baz" + self.check_both(b, a) + + b = "from foo import bar" + a = "from .foo import bar" + self.check_both(b, a) + + b = "from foo import (bar, baz)" + a = "from .foo import (bar, baz)" + self.check_both(b, a) + + def test_dotted_from(self): + b = "from green.eggs import ham" + a = "from .green.eggs import ham" + self.check_both(b, a) + + def test_from_as(self): + b = "from green.eggs import ham as spam" + a = "from .green.eggs import ham as spam" + self.check_both(b, a) + + def test_import(self): + b = "import foo" + a = "from . import foo" + self.check_both(b, a) + + b = "import foo, bar" + a = "from . import foo, bar" + self.check_both(b, a) + + b = "import foo, bar, x" + a = "from . import foo, bar, x" + self.check_both(b, a) + + b = "import x, y, z" + a = "from . import x, y, z" + self.check_both(b, a) + + def test_import_as(self): + b = "import foo as x" + a = "from . import foo as x" + self.check_both(b, a) + + b = "import a as b, b as c, c as d" + a = "from . import a as b, b as c, c as d" + self.check_both(b, a) + + def test_local_and_absolute(self): + self.always_exists = False + self.present_files = set(["foo.py", "__init__.py"]) + + s = "import foo, bar" + self.warns_unchanged(s, "absolute and local imports together") + + def test_dotted_import(self): + b = "import foo.bar" + a = "from . 
import foo.bar" + self.check_both(b, a) + + def test_dotted_import_as(self): + b = "import foo.bar as bang" + a = "from . import foo.bar as bang" + self.check_both(b, a) + + def test_prefix(self): + b = """ + # prefix + import foo.bar + """ + a = """ + # prefix + from . import foo.bar + """ + self.check_both(b, a) + + +class Test_set_literal(FixerTestCase): + + fixer = "set_literal" + + def test_basic(self): + b = """set([1, 2, 3])""" + a = """{1, 2, 3}""" + self.check(b, a) + + b = """set((1, 2, 3))""" + a = """{1, 2, 3}""" + self.check(b, a) + + b = """set((1,))""" + a = """{1}""" + self.check(b, a) + + b = """set([1])""" + self.check(b, a) + + b = """set((a, b))""" + a = """{a, b}""" + self.check(b, a) + + b = """set([a, b])""" + self.check(b, a) + + b = """set((a*234, f(args=23)))""" + a = """{a*234, f(args=23)}""" + self.check(b, a) + + b = """set([a*23, f(23)])""" + a = """{a*23, f(23)}""" + self.check(b, a) + + b = """set([a-234**23])""" + a = """{a-234**23}""" + self.check(b, a) + + def test_listcomps(self): + b = """set([x for x in y])""" + a = """{x for x in y}""" + self.check(b, a) + + b = """set([x for x in y if x == m])""" + a = """{x for x in y if x == m}""" + self.check(b, a) + + b = """set([x for x in y for a in b])""" + a = """{x for x in y for a in b}""" + self.check(b, a) + + b = """set([f(x) - 23 for x in y])""" + a = """{f(x) - 23 for x in y}""" + self.check(b, a) + + def test_whitespace(self): + b = """set( [1, 2])""" + a = """{1, 2}""" + self.check(b, a) + + b = """set([1 , 2])""" + a = """{1 , 2}""" + self.check(b, a) + + b = """set([ 1 ])""" + a = """{ 1 }""" + self.check(b, a) + + b = """set( [1] )""" + a = """{1}""" + self.check(b, a) + + b = """set([ 1, 2 ])""" + a = """{ 1, 2 }""" + self.check(b, a) + + b = """set([x for x in y ])""" + a = """{x for x in y }""" + self.check(b, a) + + b = """set( + [1, 2] + ) + """ + a = """{1, 2}\n""" + self.check(b, a) + + def test_comments(self): + b = """set((1, 2)) # Hi""" + a = """{1, 2} # Hi""" + self.check(b, a) + + # This isn't optimal behavior, but the fixer is optional. + b = """ + # Foo + set( # Bar + (1, 2) + ) + """ + a = """ + # Foo + {1, 2} + """ + self.check(b, a) + + def test_unchanged(self): + s = """set()""" + self.unchanged(s) + + s = """set(a)""" + self.unchanged(s) + + s = """set(a, b, c)""" + self.unchanged(s) + + # Don't transform generators because they might have to be lazy. + s = """set(x for x in y)""" + self.unchanged(s) + + s = """set(x for x in y if z)""" + self.unchanged(s) + + s = """set(a*823-23**2 + f(23))""" + self.unchanged(s) + + +class Test_sys_exc(FixerTestCase): + fixer = "sys_exc" + + def test_0(self): + b = "sys.exc_type" + a = "sys.exc_info()[0]" + self.check(b, a) + + def test_1(self): + b = "sys.exc_value" + a = "sys.exc_info()[1]" + self.check(b, a) + + def test_2(self): + b = "sys.exc_traceback" + a = "sys.exc_info()[2]" + self.check(b, a) + + def test_3(self): + b = "sys.exc_type # Foo" + a = "sys.exc_info()[0] # Foo" + self.check(b, a) + + def test_4(self): + b = "sys. exc_type" + a = "sys. 
exc_info()[0]" + self.check(b, a) + + def test_5(self): + b = "sys .exc_type" + a = "sys .exc_info()[0]" + self.check(b, a) + + +class Test_paren(FixerTestCase): + fixer = "paren" + + def test_0(self): + b = """[i for i in 1, 2 ]""" + a = """[i for i in (1, 2) ]""" + self.check(b, a) + + def test_1(self): + b = """[i for i in 1, 2, ]""" + a = """[i for i in (1, 2,) ]""" + self.check(b, a) + + def test_2(self): + b = """[i for i in 1, 2 ]""" + a = """[i for i in (1, 2) ]""" + self.check(b, a) + + def test_3(self): + b = """[i for i in 1, 2 if i]""" + a = """[i for i in (1, 2) if i]""" + self.check(b, a) + + def test_4(self): + b = """[i for i in 1, 2 ]""" + a = """[i for i in (1, 2) ]""" + self.check(b, a) + + def test_5(self): + b = """(i for i in 1, 2)""" + a = """(i for i in (1, 2))""" + self.check(b, a) + + def test_6(self): + b = """(i for i in 1 ,2 if i)""" + a = """(i for i in (1 ,2) if i)""" + self.check(b, a) + + def test_unchanged_0(self): + s = """[i for i in (1, 2)]""" + self.unchanged(s) + + def test_unchanged_1(self): + s = """[i for i in foo()]""" + self.unchanged(s) + + def test_unchanged_2(self): + s = """[i for i in (1, 2) if nothing]""" + self.unchanged(s) + + def test_unchanged_3(self): + s = """(i for i in (1, 2))""" + self.unchanged(s) + + def test_unchanged_4(self): + s = """[i for i in m]""" + self.unchanged(s) + +class Test_metaclass(FixerTestCase): + + fixer = 'metaclass' + + def test_unchanged(self): + self.unchanged("class X(): pass") + self.unchanged("class X(object): pass") + self.unchanged("class X(object1, object2): pass") + self.unchanged("class X(object1, object2, object3): pass") + self.unchanged("class X(metaclass=Meta): pass") + self.unchanged("class X(b, arg=23, metclass=Meta): pass") + self.unchanged("class X(b, arg=23, metaclass=Meta, other=42): pass") + + s = """ + class X: + def __metaclass__(self): pass + """ + self.unchanged(s) + + s = """ + class X: + a[23] = 74 + """ + self.unchanged(s) + + def test_comments(self): + b = """ + class X: + # hi + __metaclass__ = AppleMeta + """ + a = """ + class X(metaclass=AppleMeta): + # hi + pass + """ + self.check(b, a) + + b = """ + class X: + __metaclass__ = Meta + # Bedtime! + """ + a = """ + class X(metaclass=Meta): + pass + # Bedtime! 
+ """ + self.check(b, a) + + def test_meta(self): + # no-parent class, odd body + b = """ + class X(): + __metaclass__ = Q + pass + """ + a = """ + class X(metaclass=Q): + pass + """ + self.check(b, a) + + # one parent class, no body + b = """class X(object): __metaclass__ = Q""" + a = """class X(object, metaclass=Q): pass""" + self.check(b, a) + + + # one parent, simple body + b = """ + class X(object): + __metaclass__ = Meta + bar = 7 + """ + a = """ + class X(object, metaclass=Meta): + bar = 7 + """ + self.check(b, a) + + b = """ + class X: + __metaclass__ = Meta; x = 4; g = 23 + """ + a = """ + class X(metaclass=Meta): + x = 4; g = 23 + """ + self.check(b, a) + + # one parent, simple body, __metaclass__ last + b = """ + class X(object): + bar = 7 + __metaclass__ = Meta + """ + a = """ + class X(object, metaclass=Meta): + bar = 7 + """ + self.check(b, a) + + # redefining __metaclass__ + b = """ + class X(): + __metaclass__ = A + __metaclass__ = B + bar = 7 + """ + a = """ + class X(metaclass=B): + bar = 7 + """ + self.check(b, a) + + # multiple inheritance, simple body + b = """ + class X(clsA, clsB): + __metaclass__ = Meta + bar = 7 + """ + a = """ + class X(clsA, clsB, metaclass=Meta): + bar = 7 + """ + self.check(b, a) + + # keywords in the class statement + b = """class m(a, arg=23): __metaclass__ = Meta""" + a = """class m(a, arg=23, metaclass=Meta): pass""" + self.check(b, a) + + b = """ + class X(expression(2 + 4)): + __metaclass__ = Meta + """ + a = """ + class X(expression(2 + 4), metaclass=Meta): + pass + """ + self.check(b, a) + + b = """ + class X(expression(2 + 4), x**4): + __metaclass__ = Meta + """ + a = """ + class X(expression(2 + 4), x**4, metaclass=Meta): + pass + """ + self.check(b, a) + + b = """ + class X: + __metaclass__ = Meta + save.py = 23 + """ + a = """ + class X(metaclass=Meta): + save.py = 23 + """ + self.check(b, a) + + +class Test_getcwdu(FixerTestCase): + + fixer = 'getcwdu' + + def test_basic(self): + b = """os.getcwdu""" + a = """os.getcwd""" + self.check(b, a) + + b = """os.getcwdu()""" + a = """os.getcwd()""" + self.check(b, a) + + b = """meth = os.getcwdu""" + a = """meth = os.getcwd""" + self.check(b, a) + + b = """os.getcwdu(args)""" + a = """os.getcwd(args)""" + self.check(b, a) + + def test_comment(self): + b = """os.getcwdu() # Foo""" + a = """os.getcwd() # Foo""" + self.check(b, a) + + def test_unchanged(self): + s = """os.getcwd()""" + self.unchanged(s) + + s = """getcwdu()""" + self.unchanged(s) + + s = """os.getcwdb()""" + self.unchanged(s) + + def test_indentation(self): + b = """ + if 1: + os.getcwdu() + """ + a = """ + if 1: + os.getcwd() + """ + self.check(b, a) + + def test_multilation(self): + b = """os .getcwdu()""" + a = """os .getcwd()""" + self.check(b, a) + + b = """os. getcwdu""" + a = """os. getcwd""" + self.check(b, a) + + b = """os.getcwdu ( )""" + a = """os.getcwd ( )""" + self.check(b, a) + + +class Test_operator(FixerTestCase): + + fixer = "operator" + + def test_operator_isCallable(self): + b = "operator.isCallable(x)" + a = "hasattr(x, '__call__')" + self.check(b, a) + + def test_operator_sequenceIncludes(self): + b = "operator.sequenceIncludes(x, y)" + a = "operator.contains(x, y)" + self.check(b, a) + + b = "operator .sequenceIncludes(x, y)" + a = "operator .contains(x, y)" + self.check(b, a) + + b = "operator. sequenceIncludes(x, y)" + a = "operator. 
contains(x, y)" + self.check(b, a) + + def test_operator_isSequenceType(self): + b = "operator.isSequenceType(x)" + a = "import collections\nisinstance(x, collections.Sequence)" + self.check(b, a) + + def test_operator_isMappingType(self): + b = "operator.isMappingType(x)" + a = "import collections\nisinstance(x, collections.Mapping)" + self.check(b, a) + + def test_operator_isNumberType(self): + b = "operator.isNumberType(x)" + a = "import numbers\nisinstance(x, numbers.Number)" + self.check(b, a) + + def test_operator_repeat(self): + b = "operator.repeat(x, n)" + a = "operator.mul(x, n)" + self.check(b, a) + + b = "operator .repeat(x, n)" + a = "operator .mul(x, n)" + self.check(b, a) + + b = "operator. repeat(x, n)" + a = "operator. mul(x, n)" + self.check(b, a) + + def test_operator_irepeat(self): + b = "operator.irepeat(x, n)" + a = "operator.imul(x, n)" + self.check(b, a) + + b = "operator .irepeat(x, n)" + a = "operator .imul(x, n)" + self.check(b, a) + + b = "operator. irepeat(x, n)" + a = "operator. imul(x, n)" + self.check(b, a) + + def test_bare_isCallable(self): + s = "isCallable(x)" + t = "You should use 'hasattr(x, '__call__')' here." + self.warns_unchanged(s, t) + + def test_bare_sequenceIncludes(self): + s = "sequenceIncludes(x, y)" + t = "You should use 'operator.contains(x, y)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_isSequenceType(self): + s = "isSequenceType(z)" + t = "You should use 'isinstance(z, collections.Sequence)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_isMappingType(self): + s = "isMappingType(x)" + t = "You should use 'isinstance(x, collections.Mapping)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_isNumberType(self): + s = "isNumberType(y)" + t = "You should use 'isinstance(y, numbers.Number)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_repeat(self): + s = "repeat(x, n)" + t = "You should use 'operator.mul(x, n)' here." + self.warns_unchanged(s, t) + + def test_bare_operator_irepeat(self): + s = "irepeat(y, 187)" + t = "You should use 'operator.imul(y, 187)' here." 
+ self.warns_unchanged(s, t) + + +class Test_exitfunc(FixerTestCase): + + fixer = "exitfunc" + + def test_simple(self): + b = """ + import sys + sys.exitfunc = my_atexit + """ + a = """ + import sys + import atexit + atexit.register(my_atexit) + """ + self.check(b, a) + + def test_names_import(self): + b = """ + import sys, crumbs + sys.exitfunc = my_func + """ + a = """ + import sys, crumbs, atexit + atexit.register(my_func) + """ + self.check(b, a) + + def test_complex_expression(self): + b = """ + import sys + sys.exitfunc = do(d)/a()+complex(f=23, g=23)*expression + """ + a = """ + import sys + import atexit + atexit.register(do(d)/a()+complex(f=23, g=23)*expression) + """ + self.check(b, a) + + def test_comments(self): + b = """ + import sys # Foo + sys.exitfunc = f # Blah + """ + a = """ + import sys + import atexit # Foo + atexit.register(f) # Blah + """ + self.check(b, a) + + b = """ + import apples, sys, crumbs, larry # Pleasant comments + sys.exitfunc = func + """ + a = """ + import apples, sys, crumbs, larry, atexit # Pleasant comments + atexit.register(func) + """ + self.check(b, a) + + def test_in_a_function(self): + b = """ + import sys + def f(): + sys.exitfunc = func + """ + a = """ + import sys + import atexit + def f(): + atexit.register(func) + """ + self.check(b, a) + + def test_no_sys_import(self): + b = """sys.exitfunc = f""" + a = """atexit.register(f)""" + msg = ("Can't find sys import; Please add an atexit import at the " + "top of your file.") + self.warns(b, a, msg) + + + def test_unchanged(self): + s = """f(sys.exitfunc)""" + self.unchanged(s) diff --git a/lib3/2to3/lib2to3/tests/test_main.py b/lib3/2to3/lib2to3/tests/test_main.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/test_main.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +import sys +import codecs +import logging +import io +import unittest + +from lib2to3 import main + + +class TestMain(unittest.TestCase): + + def tearDown(self): + # Clean up logging configuration down by main. + del logging.root.handlers[:] + + def run_2to3_capture(self, args, in_capture, out_capture, err_capture): + save_stdin = sys.stdin + save_stdout = sys.stdout + save_stderr = sys.stderr + sys.stdin = in_capture + sys.stdout = out_capture + sys.stderr = err_capture + try: + return main.main("lib2to3.fixes", args) + finally: + sys.stdin = save_stdin + sys.stdout = save_stdout + sys.stderr = save_stderr + + def test_unencodable_diff(self): + input_stream = io.StringIO("print 'nothing'\nprint u'??ber'\n") + out = io.StringIO() + out_enc = codecs.getwriter("ascii")(out) + err = io.StringIO() + ret = self.run_2to3_capture(["-"], input_stream, out_enc, err) + self.assertEqual(ret, 0) + output = out.getvalue() + self.assertTrue("-print 'nothing'" in output) + self.assertTrue("WARNING: couldn't encode 's diff for " + "your terminal" in err.getvalue()) diff --git a/lib3/2to3/lib2to3/tests/test_parser.py b/lib3/2to3/lib2to3/tests/test_parser.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/test_parser.py @@ -0,0 +1,217 @@ +"""Test suite for 2to3's parser and grammar files. + +This is the place to add tests for changes to 2to3's grammar, such as those +merging the grammars for Python 2 and 3. In addition to specific tests for +parts of the grammar we've changed, we also make sure we can parse the +test_grammar.py files from both Python 2 and Python 3. +""" + + + +# Testing imports +from . 
import support +from .support import driver, test_dir + +# Python imports +import os +import sys + +# Local imports +from lib2to3.pgen2 import tokenize +from ..pgen2.parse import ParseError + + +class GrammarTest(support.TestCase): + def validate(self, code): + support.parse_string(code) + + def invalid_syntax(self, code): + try: + self.validate(code) + except ParseError: + pass + else: + raise AssertionError("Syntax shouldn't have been valid") + + +class TestRaiseChanges(GrammarTest): + def test_2x_style_1(self): + self.validate("raise") + + def test_2x_style_2(self): + self.validate("raise E, V") + + def test_2x_style_3(self): + self.validate("raise E, V, T") + + def test_2x_style_invalid_1(self): + self.invalid_syntax("raise E, V, T, Z") + + def test_3x_style(self): + self.validate("raise E1 from E2") + + def test_3x_style_invalid_1(self): + self.invalid_syntax("raise E, V from E1") + + def test_3x_style_invalid_2(self): + self.invalid_syntax("raise E from E1, E2") + + def test_3x_style_invalid_3(self): + self.invalid_syntax("raise from E1, E2") + + def test_3x_style_invalid_4(self): + self.invalid_syntax("raise E from") + + +# Adapated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef +class TestFunctionAnnotations(GrammarTest): + def test_1(self): + self.validate("""def f(x) -> list: pass""") + + def test_2(self): + self.validate("""def f(x:int): pass""") + + def test_3(self): + self.validate("""def f(*x:str): pass""") + + def test_4(self): + self.validate("""def f(**x:float): pass""") + + def test_5(self): + self.validate("""def f(x, y:1+2): pass""") + + def test_6(self): + self.validate("""def f(a, (b:1, c:2, d)): pass""") + + def test_7(self): + self.validate("""def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6): pass""") + + def test_8(self): + s = """def f(a, (b:1, c:2, d), e:3=4, f=5, + *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass""" + self.validate(s) + + +class TestExcept(GrammarTest): + def test_new(self): + s = """ + try: + x + except E as N: + y""" + self.validate(s) + + def test_old(self): + s = """ + try: + x + except E, N: + y""" + self.validate(s) + + +# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testAtoms +class TestSetLiteral(GrammarTest): + def test_1(self): + self.validate("""x = {'one'}""") + + def test_2(self): + self.validate("""x = {'one', 1,}""") + + def test_3(self): + self.validate("""x = {'one', 'two', 'three'}""") + + def test_4(self): + self.validate("""x = {2, 3, 4,}""") + + +class TestNumericLiterals(GrammarTest): + def test_new_octal_notation(self): + self.validate("""0o7777777777777""") + self.invalid_syntax("""0o7324528887""") + + def test_new_binary_notation(self): + self.validate("""0b101010""") + self.invalid_syntax("""0b0101021""") + + +class TestClassDef(GrammarTest): + def test_new_syntax(self): + self.validate("class B(t=7): pass") + self.validate("class B(t, *args): pass") + self.validate("class B(t, **kwargs): pass") + self.validate("class B(t, *args, **kwargs): pass") + self.validate("class B(t, y=9, *args, **kwargs): pass") + + +class TestParserIdempotency(support.TestCase): + + """A cut-down version of pytree_idempotency.py.""" + + def test_all_project_files(self): + if sys.platform.startswith("win"): + # XXX something with newlines goes wrong on Windows. 
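The grammar cases above are parsed through the shared test driver; a minimal sketch of the same kind of check, assuming a stock lib2to3 install, builds its own driver and feeds it a few of the constructs tested here:

    # lib2to3's merged grammar accepts Python 2 and Python 3 constructs alike.
    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    d.parse_string("raise E, V, T\n")                  # 2.x-style raise
    d.parse_string("raise E1 from E2\n")               # 3.x-style raise
    d.parse_string("def f(x: int) -> list: pass\n")    # function annotations
    d.parse_string("x = {'one', 'two'}\n")             # set literal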
+ return + for filepath in support.all_project_files(): + with open(filepath, "rb") as fp: + encoding = tokenize.detect_encoding(fp.readline)[0] + self.assertTrue(encoding is not None, + "can't detect encoding for %s" % filepath) + with open(filepath, "r") as fp: + source = fp.read() + source = source.decode(encoding) + tree = driver.parse_string(source) + new = str(tree) + if diff(filepath, new, encoding): + self.fail("Idempotency failed: %s" % filepath) + + def test_extended_unpacking(self): + driver.parse_string("a, *b, c = x\n") + driver.parse_string("[*a, b] = x\n") + driver.parse_string("(z, *y, w) = m\n") + driver.parse_string("for *z, m in d: pass\n") + +class TestLiterals(GrammarTest): + + def validate(self, s): + driver.parse_string(support.dedent(s) + "\n\n") + + def test_multiline_bytes_literals(self): + s = """ + md5test(b"\xaa" * 80, + (b"Test Using Larger Than Block-Size Key " + b"and Larger Than One Block-Size Data"), + "6f630fad67cda0ee1fb1f562db3aa53e") + """ + self.validate(s) + + def test_multiline_bytes_tripquote_literals(self): + s = ''' + b""" + + + """ + ''' + self.validate(s) + + def test_multiline_str_literals(self): + s = """ + md5test("\xaa" * 80, + ("Test Using Larger Than Block-Size Key " + "and Larger Than One Block-Size Data"), + "6f630fad67cda0ee1fb1f562db3aa53e") + """ + self.validate(s) + + +def diff(fn, result, encoding): + f = open("@", "w") + try: + f.write(result.encode(encoding)) + finally: + f.close() + try: + fn = fn.replace('"', '\\"') + return os.system('diff -u "%s" @' % fn) + finally: + os.remove("@") diff --git a/lib3/2to3/lib2to3/tests/test_pytree.py b/lib3/2to3/lib2to3/tests/test_pytree.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/test_pytree.py @@ -0,0 +1,494 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Unit tests for pytree.py. + +NOTE: Please *don't* add doc strings to individual test methods! +In verbose mode, printing of the module, class and method name is much +more helpful than printing of (the first line of) the docstring, +especially when debugging a test. +""" + + + +import sys +import warnings + +# Testing imports +from . import support + +from lib2to3 import pytree + +try: + sorted +except NameError: + def sorted(lst): + l = list(lst) + l.sort() + return l + +class TestNodes(support.TestCase): + + """Unit tests for nodes (Base, Leaf, Node).""" + + if sys.version_info >= (2,6): + # warnings.catch_warnings is new in 2.6. 
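The idempotency test above sniffs each file's encoding with lib2to3's tokenizer before re-parsing it; a minimal sketch of that call on an in-memory buffer:

    # detect_encoding reads the coding cookie from a readline callable.
    import io
    from lib2to3.pgen2 import tokenize

    buf = io.BytesIO(b"# -*- coding: utf-8 -*-\nx = 1\n")
    encoding, first_lines = tokenize.detect_encoding(buf.readline)
    print(encoding)   # utf-8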
+ def test_deprecated_prefix_methods(self): + l = pytree.Leaf(100, "foo") + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always", DeprecationWarning) + self.assertEqual(l.get_prefix(), "") + l.set_prefix("hi") + self.assertEqual(l.prefix, "hi") + self.assertEqual(len(w), 2) + for warning in w: + self.assertTrue(warning.category is DeprecationWarning) + self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \ + "use the prefix property") + self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \ + "use the prefix property") + + def test_instantiate_base(self): + if __debug__: + # Test that instantiating Base() raises an AssertionError + self.assertRaises(AssertionError, pytree.Base) + + def test_leaf(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(l1.type, 100) + self.assertEqual(l1.value, "foo") + + def test_leaf_repr(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(repr(l1), "Leaf(100, 'foo')") + + def test_leaf_str(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(str(l1), "foo") + l2 = pytree.Leaf(100, "foo", context=(" ", (10, 1))) + self.assertEqual(str(l2), " foo") + + def test_leaf_str_numeric_value(self): + # Make sure that the Leaf's value is stringified. Failing to + # do this can cause a TypeError in certain situations. + l1 = pytree.Leaf(2, 5) + l1.prefix = "foo_" + self.assertEqual(str(l1), "foo_5") + + def test_leaf_equality(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "foo", context=(" ", (1, 0))) + self.assertEqual(l1, l2) + l3 = pytree.Leaf(101, "foo") + l4 = pytree.Leaf(100, "bar") + self.assertNotEqual(l1, l3) + self.assertNotEqual(l1, l4) + + def test_leaf_prefix(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(l1.prefix, "") + self.assertFalse(l1.was_changed) + l1.prefix = " ##\n\n" + self.assertEqual(l1.prefix, " ##\n\n") + self.assertTrue(l1.was_changed) + + def test_node(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(200, "bar") + n1 = pytree.Node(1000, [l1, l2]) + self.assertEqual(n1.type, 1000) + self.assertEqual(n1.children, [l1, l2]) + + def test_node_repr(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0))) + n1 = pytree.Node(1000, [l1, l2]) + self.assertEqual(repr(n1), + "Node(1000, [%s, %s])" % (repr(l1), repr(l2))) + + def test_node_str(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0))) + n1 = pytree.Node(1000, [l1, l2]) + self.assertEqual(str(n1), "foo bar") + + def test_node_prefix(self): + l1 = pytree.Leaf(100, "foo") + self.assertEqual(l1.prefix, "") + n1 = pytree.Node(1000, [l1]) + self.assertEqual(n1.prefix, "") + n1.prefix = " " + self.assertEqual(n1.prefix, " ") + self.assertEqual(l1.prefix, " ") + + def test_get_suffix(self): + l1 = pytree.Leaf(100, "foo", prefix="a") + l2 = pytree.Leaf(100, "bar", prefix="b") + n1 = pytree.Node(1000, [l1, l2]) + + self.assertEqual(l1.get_suffix(), l2.prefix) + self.assertEqual(l2.get_suffix(), "") + self.assertEqual(n1.get_suffix(), "") + + l3 = pytree.Leaf(100, "bar", prefix="c") + n2 = pytree.Node(1000, [n1, l3]) + + self.assertEqual(n1.get_suffix(), l3.prefix) + self.assertEqual(l3.get_suffix(), "") + self.assertEqual(n2.get_suffix(), "") + + def test_node_equality(self): + n1 = pytree.Node(1000, ()) + n2 = pytree.Node(1000, [], context=(" ", (1, 0))) + self.assertEqual(n1, n2) + n3 = pytree.Node(1001, ()) + self.assertNotEqual(n1, n3) + + def test_node_recursive_equality(self): + l1 = pytree.Leaf(100, "foo") + l2 = 
pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1]) + n2 = pytree.Node(1000, [l2]) + self.assertEqual(n1, n2) + l3 = pytree.Leaf(100, "bar") + n3 = pytree.Node(1000, [l3]) + self.assertNotEqual(n1, n3) + + def test_replace(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "+") + l3 = pytree.Leaf(100, "bar") + n1 = pytree.Node(1000, [l1, l2, l3]) + self.assertEqual(n1.children, [l1, l2, l3]) + self.assertTrue(isinstance(n1.children, list)) + self.assertFalse(n1.was_changed) + l2new = pytree.Leaf(100, "-") + l2.replace(l2new) + self.assertEqual(n1.children, [l1, l2new, l3]) + self.assertTrue(isinstance(n1.children, list)) + self.assertTrue(n1.was_changed) + + def test_replace_with_list(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "+") + l3 = pytree.Leaf(100, "bar") + n1 = pytree.Node(1000, [l1, l2, l3]) + + l2.replace([pytree.Leaf(100, "*"), pytree.Leaf(100, "*")]) + self.assertEqual(str(n1), "foo**bar") + self.assertTrue(isinstance(n1.children, list)) + + def test_leaves(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "fooey") + n2 = pytree.Node(1000, [l1, l2]) + n3 = pytree.Node(1000, [l3]) + n1 = pytree.Node(1000, [n2, n3]) + + self.assertEqual(list(n1.leaves()), [l1, l2, l3]) + + def test_depth(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + n2 = pytree.Node(1000, [l1, l2]) + n3 = pytree.Node(1000, []) + n1 = pytree.Node(1000, [n2, n3]) + + self.assertEqual(l1.depth(), 2) + self.assertEqual(n3.depth(), 1) + self.assertEqual(n1.depth(), 0) + + def test_post_order(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "fooey") + c1 = pytree.Node(1000, [l1, l2]) + n1 = pytree.Node(1000, [c1, l3]) + self.assertEqual(list(n1.post_order()), [l1, l2, c1, l3, n1]) + + def test_pre_order(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "fooey") + c1 = pytree.Node(1000, [l1, l2]) + n1 = pytree.Node(1000, [c1, l3]) + self.assertEqual(list(n1.pre_order()), [n1, c1, l1, l2, l3]) + + def test_changed(self): + l1 = pytree.Leaf(100, "f") + self.assertFalse(l1.was_changed) + l1.changed() + self.assertTrue(l1.was_changed) + + l1 = pytree.Leaf(100, "f") + n1 = pytree.Node(1000, [l1]) + self.assertFalse(n1.was_changed) + n1.changed() + self.assertTrue(n1.was_changed) + + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "+") + l3 = pytree.Leaf(100, "bar") + n1 = pytree.Node(1000, [l1, l2, l3]) + n2 = pytree.Node(1000, [n1]) + self.assertFalse(l1.was_changed) + self.assertFalse(n1.was_changed) + self.assertFalse(n2.was_changed) + + n1.changed() + self.assertTrue(n1.was_changed) + self.assertTrue(n2.was_changed) + self.assertFalse(l1.was_changed) + + def test_leaf_constructor_prefix(self): + for prefix in ("xyz_", ""): + l1 = pytree.Leaf(100, "self", prefix=prefix) + self.assertTrue(str(l1), prefix + "self") + self.assertEqual(l1.prefix, prefix) + + def test_node_constructor_prefix(self): + for prefix in ("xyz_", ""): + l1 = pytree.Leaf(100, "self") + l2 = pytree.Leaf(100, "foo", prefix="_") + n1 = pytree.Node(1000, [l1, l2], prefix=prefix) + self.assertTrue(str(n1), prefix + "self_foo") + self.assertEqual(n1.prefix, prefix) + self.assertEqual(l1.prefix, prefix) + self.assertEqual(l2.prefix, "_") + + def test_remove(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1, l2]) + n2 = pytree.Node(1000, [n1]) + + self.assertEqual(n1.remove(), 0) + self.assertEqual(n2.children, []) + 
self.assertEqual(l1.parent, n1) + self.assertEqual(n1.parent, None) + self.assertEqual(n2.parent, None) + self.assertFalse(n1.was_changed) + self.assertTrue(n2.was_changed) + + self.assertEqual(l2.remove(), 1) + self.assertEqual(l1.remove(), 0) + self.assertEqual(n1.children, []) + self.assertEqual(l1.parent, None) + self.assertEqual(n1.parent, None) + self.assertEqual(n2.parent, None) + self.assertTrue(n1.was_changed) + self.assertTrue(n2.was_changed) + + def test_remove_parentless(self): + n1 = pytree.Node(1000, []) + n1.remove() + self.assertEqual(n1.parent, None) + + l1 = pytree.Leaf(100, "foo") + l1.remove() + self.assertEqual(l1.parent, None) + + def test_node_set_child(self): + l1 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1]) + + l2 = pytree.Leaf(100, "bar") + n1.set_child(0, l2) + self.assertEqual(l1.parent, None) + self.assertEqual(l2.parent, n1) + self.assertEqual(n1.children, [l2]) + + n2 = pytree.Node(1000, [l1]) + n2.set_child(0, n1) + self.assertEqual(l1.parent, None) + self.assertEqual(n1.parent, n2) + self.assertEqual(n2.parent, None) + self.assertEqual(n2.children, [n1]) + + self.assertRaises(IndexError, n1.set_child, 4, l2) + # I don't care what it raises, so long as it's an exception + self.assertRaises(Exception, n1.set_child, 0, list) + + def test_node_insert_child(self): + l1 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1]) + + l2 = pytree.Leaf(100, "bar") + n1.insert_child(0, l2) + self.assertEqual(l2.parent, n1) + self.assertEqual(n1.children, [l2, l1]) + + l3 = pytree.Leaf(100, "abc") + n1.insert_child(2, l3) + self.assertEqual(n1.children, [l2, l1, l3]) + + # I don't care what it raises, so long as it's an exception + self.assertRaises(Exception, n1.insert_child, 0, list) + + def test_node_append_child(self): + n1 = pytree.Node(1000, []) + + l1 = pytree.Leaf(100, "foo") + n1.append_child(l1) + self.assertEqual(l1.parent, n1) + self.assertEqual(n1.children, [l1]) + + l2 = pytree.Leaf(100, "bar") + n1.append_child(l2) + self.assertEqual(l2.parent, n1) + self.assertEqual(n1.children, [l1, l2]) + + # I don't care what it raises, so long as it's an exception + self.assertRaises(Exception, n1.append_child, list) + + def test_node_next_sibling(self): + n1 = pytree.Node(1000, []) + n2 = pytree.Node(1000, []) + p1 = pytree.Node(1000, [n1, n2]) + + self.assertTrue(n1.next_sibling is n2) + self.assertEqual(n2.next_sibling, None) + self.assertEqual(p1.next_sibling, None) + + def test_leaf_next_sibling(self): + l1 = pytree.Leaf(100, "a") + l2 = pytree.Leaf(100, "b") + p1 = pytree.Node(1000, [l1, l2]) + + self.assertTrue(l1.next_sibling is l2) + self.assertEqual(l2.next_sibling, None) + self.assertEqual(p1.next_sibling, None) + + def test_node_prev_sibling(self): + n1 = pytree.Node(1000, []) + n2 = pytree.Node(1000, []) + p1 = pytree.Node(1000, [n1, n2]) + + self.assertTrue(n2.prev_sibling is n1) + self.assertEqual(n1.prev_sibling, None) + self.assertEqual(p1.prev_sibling, None) + + def test_leaf_prev_sibling(self): + l1 = pytree.Leaf(100, "a") + l2 = pytree.Leaf(100, "b") + p1 = pytree.Node(1000, [l1, l2]) + + self.assertTrue(l2.prev_sibling is l1) + self.assertEqual(l1.prev_sibling, None) + self.assertEqual(p1.prev_sibling, None) + + +class TestPatterns(support.TestCase): + + """Unit tests for tree matching patterns.""" + + def test_basic_patterns(self): + # Build a tree + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1, l2]) + n2 = pytree.Node(1000, [l3]) + root = pytree.Node(1000, 
[n1, n2]) + # Build a pattern matching a leaf + pl = pytree.LeafPattern(100, "foo", name="pl") + r = {} + self.assertFalse(pl.match(root, results=r)) + self.assertEqual(r, {}) + self.assertFalse(pl.match(n1, results=r)) + self.assertEqual(r, {}) + self.assertFalse(pl.match(n2, results=r)) + self.assertEqual(r, {}) + self.assertTrue(pl.match(l1, results=r)) + self.assertEqual(r, {"pl": l1}) + r = {} + self.assertFalse(pl.match(l2, results=r)) + self.assertEqual(r, {}) + # Build a pattern matching a node + pn = pytree.NodePattern(1000, [pl], name="pn") + self.assertFalse(pn.match(root, results=r)) + self.assertEqual(r, {}) + self.assertFalse(pn.match(n1, results=r)) + self.assertEqual(r, {}) + self.assertTrue(pn.match(n2, results=r)) + self.assertEqual(r, {"pn": n2, "pl": l3}) + r = {} + self.assertFalse(pn.match(l1, results=r)) + self.assertEqual(r, {}) + self.assertFalse(pn.match(l2, results=r)) + self.assertEqual(r, {}) + + def test_wildcard(self): + # Build a tree for testing + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + l3 = pytree.Leaf(100, "foo") + n1 = pytree.Node(1000, [l1, l2]) + n2 = pytree.Node(1000, [l3]) + root = pytree.Node(1000, [n1, n2]) + # Build a pattern + pl = pytree.LeafPattern(100, "foo", name="pl") + pn = pytree.NodePattern(1000, [pl], name="pn") + pw = pytree.WildcardPattern([[pn], [pl, pl]], name="pw") + r = {} + self.assertFalse(pw.match_seq([root], r)) + self.assertEqual(r, {}) + self.assertFalse(pw.match_seq([n1], r)) + self.assertEqual(r, {}) + self.assertTrue(pw.match_seq([n2], r)) + # These are easier to debug + self.assertEqual(sorted(r.keys()), ["pl", "pn", "pw"]) + self.assertEqual(r["pl"], l1) + self.assertEqual(r["pn"], n2) + self.assertEqual(r["pw"], [n2]) + # But this is equivalent + self.assertEqual(r, {"pl": l1, "pn": n2, "pw": [n2]}) + r = {} + self.assertTrue(pw.match_seq([l1, l3], r)) + self.assertEqual(r, {"pl": l3, "pw": [l1, l3]}) + self.assertTrue(r["pl"] is l3) + r = {} + + def test_generate_matches(self): + la = pytree.Leaf(1, "a") + lb = pytree.Leaf(1, "b") + lc = pytree.Leaf(1, "c") + ld = pytree.Leaf(1, "d") + le = pytree.Leaf(1, "e") + lf = pytree.Leaf(1, "f") + leaves = [la, lb, lc, ld, le, lf] + root = pytree.Node(1000, leaves) + pa = pytree.LeafPattern(1, "a", "pa") + pb = pytree.LeafPattern(1, "b", "pb") + pc = pytree.LeafPattern(1, "c", "pc") + pd = pytree.LeafPattern(1, "d", "pd") + pe = pytree.LeafPattern(1, "e", "pe") + pf = pytree.LeafPattern(1, "f", "pf") + pw = pytree.WildcardPattern([[pa, pb, pc], [pd, pe], + [pa, pb], [pc, pd], [pe, pf]], + min=1, max=4, name="pw") + self.assertEqual([x[0] for x in pw.generate_matches(leaves)], + [3, 5, 2, 4, 6]) + pr = pytree.NodePattern(type=1000, content=[pw], name="pr") + matches = list(pytree.generate_matches([pr], [root])) + self.assertEqual(len(matches), 1) + c, r = matches[0] + self.assertEqual(c, 1) + self.assertEqual(str(r["pr"]), "abcdef") + self.assertEqual(r["pw"], [la, lb, lc, ld, le, lf]) + for c in "abcdef": + self.assertEqual(r["p" + c], pytree.Leaf(1, c)) + + def test_has_key_example(self): + pattern = pytree.NodePattern(331, + (pytree.LeafPattern(7), + pytree.WildcardPattern(name="args"), + pytree.LeafPattern(8))) + l1 = pytree.Leaf(7, "(") + l2 = pytree.Leaf(3, "x") + l3 = pytree.Leaf(8, ")") + node = pytree.Node(331, [l1, l2, l3]) + r = {} + self.assertTrue(pattern.match(node, r)) + self.assertEqual(r["args"], [l2]) diff --git a/lib3/2to3/lib2to3/tests/test_refactor.py b/lib3/2to3/lib2to3/tests/test_refactor.py new file mode 100644 --- /dev/null +++ 
b/lib3/2to3/lib2to3/tests/test_refactor.py @@ -0,0 +1,281 @@ +""" +Unit tests for refactor.py. +""" + + + +import sys +import os +import codecs +import operator +import io +import tempfile +import shutil +import unittest +import warnings + +from lib2to3 import refactor, pygram, fixer_base +from lib2to3.pgen2 import token + +from . import support + + +TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data") +FIXER_DIR = os.path.join(TEST_DATA_DIR, "fixers") + +sys.path.append(FIXER_DIR) +try: + _DEFAULT_FIXERS = refactor.get_fixers_from_package("myfixes") +finally: + sys.path.pop() + +_2TO3_FIXERS = refactor.get_fixers_from_package("lib2to3.fixes") + +class TestRefactoringTool(unittest.TestCase): + + def setUp(self): + sys.path.append(FIXER_DIR) + + def tearDown(self): + sys.path.pop() + + def check_instances(self, instances, classes): + for inst, cls in zip(instances, classes): + if not isinstance(inst, cls): + self.fail("%s are not instances of %s" % instances, classes) + + def rt(self, options=None, fixers=_DEFAULT_FIXERS, explicit=None): + return refactor.RefactoringTool(fixers, options, explicit) + + def test_print_function_option(self): + rt = self.rt({"print_function" : True}) + self.assertTrue(rt.grammar is pygram.python_grammar_no_print_statement) + self.assertTrue(rt.driver.grammar is + pygram.python_grammar_no_print_statement) + + def test_fixer_loading_helpers(self): + contents = ["explicit", "first", "last", "parrot", "preorder"] + non_prefixed = refactor.get_all_fix_names("myfixes") + prefixed = refactor.get_all_fix_names("myfixes", False) + full_names = refactor.get_fixers_from_package("myfixes") + self.assertEqual(prefixed, ["fix_" + name for name in contents]) + self.assertEqual(non_prefixed, contents) + self.assertEqual(full_names, + ["myfixes.fix_" + name for name in contents]) + + def test_detect_future_features(self): + run = refactor._detect_future_features + fs = frozenset + empty = fs() + self.assertEqual(run(""), empty) + self.assertEqual(run("from __future__ import print_function"), + fs(("print_function",))) + self.assertEqual(run("from __future__ import generators"), + fs(("generators",))) + self.assertEqual(run("from __future__ import generators, feature"), + fs(("generators", "feature"))) + inp = "from __future__ import generators, print_function" + self.assertEqual(run(inp), fs(("generators", "print_function"))) + inp ="from __future__ import print_function, generators" + self.assertEqual(run(inp), fs(("print_function", "generators"))) + inp = "from __future__ import (print_function,)" + self.assertEqual(run(inp), fs(("print_function",))) + inp = "from __future__ import (generators, print_function)" + self.assertEqual(run(inp), fs(("generators", "print_function"))) + inp = "from __future__ import (generators, nested_scopes)" + self.assertEqual(run(inp), fs(("generators", "nested_scopes"))) + inp = """from __future__ import generators +from __future__ import print_function""" + self.assertEqual(run(inp), fs(("generators", "print_function"))) + invalid = ("from", + "from 4", + "from x", + "from x 5", + "from x im", + "from x import", + "from x import 4", + ) + for inp in invalid: + self.assertEqual(run(inp), empty) + inp = "'docstring'\nfrom __future__ import print_function" + self.assertEqual(run(inp), fs(("print_function",))) + inp = "'docstring'\n'somng'\nfrom __future__ import print_function" + self.assertEqual(run(inp), empty) + inp = "# comment\nfrom __future__ import print_function" + self.assertEqual(run(inp), fs(("print_function",))) + inp = "# 
comment\n'doc'\nfrom __future__ import print_function" + self.assertEqual(run(inp), fs(("print_function",))) + inp = "class x: pass\nfrom __future__ import print_function" + self.assertEqual(run(inp), empty) + + def test_get_headnode_dict(self): + class NoneFix(fixer_base.BaseFix): + pass + + class FileInputFix(fixer_base.BaseFix): + PATTERN = "file_input< any * >" + + class SimpleFix(fixer_base.BaseFix): + PATTERN = "'name'" + + no_head = NoneFix({}, []) + with_head = FileInputFix({}, []) + simple = SimpleFix({}, []) + d = refactor._get_headnode_dict([no_head, with_head, simple]) + top_fixes = d.pop(pygram.python_symbols.file_input) + self.assertEqual(top_fixes, [with_head, no_head]) + name_fixes = d.pop(token.NAME) + self.assertEqual(name_fixes, [simple, no_head]) + for fixes in d.values(): + self.assertEqual(fixes, [no_head]) + + def test_fixer_loading(self): + from myfixes.fix_first import FixFirst + from myfixes.fix_last import FixLast + from myfixes.fix_parrot import FixParrot + from myfixes.fix_preorder import FixPreorder + + rt = self.rt() + pre, post = rt.get_fixers() + + self.check_instances(pre, [FixPreorder]) + self.check_instances(post, [FixFirst, FixParrot, FixLast]) + + def test_naughty_fixers(self): + self.assertRaises(ImportError, self.rt, fixers=["not_here"]) + self.assertRaises(refactor.FixerError, self.rt, fixers=["no_fixer_cls"]) + self.assertRaises(refactor.FixerError, self.rt, fixers=["bad_order"]) + + def test_refactor_string(self): + rt = self.rt() + input = "def parrot(): pass\n\n" + tree = rt.refactor_string(input, "") + self.assertNotEqual(str(tree), input) + + input = "def f(): pass\n\n" + tree = rt.refactor_string(input, "") + self.assertEqual(str(tree), input) + + def test_refactor_stdin(self): + + class MyRT(refactor.RefactoringTool): + + def print_output(self, old_text, new_text, filename, equal): + results.extend([old_text, new_text, filename, equal]) + + results = [] + rt = MyRT(_DEFAULT_FIXERS) + save = sys.stdin + sys.stdin = io.StringIO("def parrot(): pass\n\n") + try: + rt.refactor_stdin() + finally: + sys.stdin = save + expected = ["def parrot(): pass\n\n", + "def cheese(): pass\n\n", + "", False] + self.assertEqual(results, expected) + + def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS): + def read_file(): + with open(test_file, "rb") as fp: + return fp.read() + old_contents = read_file() + rt = self.rt(fixers=fixers) + + rt.refactor_file(test_file) + self.assertEqual(old_contents, read_file()) + + try: + rt.refactor_file(test_file, True) + new_contents = read_file() + self.assertNotEqual(old_contents, new_contents) + finally: + with open(test_file, "wb") as fp: + fp.write(old_contents) + return new_contents + + def test_refactor_file(self): + test_file = os.path.join(FIXER_DIR, "parrot_example.py") + self.check_file_refactoring(test_file, _DEFAULT_FIXERS) + + def test_refactor_dir(self): + def check(structure, expected): + def mock_refactor_file(self, f, *args): + got.append(f) + save_func = refactor.RefactoringTool.refactor_file + refactor.RefactoringTool.refactor_file = mock_refactor_file + rt = self.rt() + got = [] + dir = tempfile.mkdtemp(prefix="2to3-test_refactor") + try: + os.mkdir(os.path.join(dir, "a_dir")) + for fn in structure: + open(os.path.join(dir, fn), "wb").close() + rt.refactor_dir(dir) + finally: + refactor.RefactoringTool.refactor_file = save_func + shutil.rmtree(dir) + self.assertEqual(got, + [os.path.join(dir, path) for path in expected]) + check([], []) + tree = ["nothing", + "hi.py", + ".dumb", + ".after.py", 
+ "notpy.npy", + "sappy"] + expected = ["hi.py"] + check(tree, expected) + tree = ["hi.py", + os.path.join("a_dir", "stuff.py")] + check(tree, tree) + + def test_file_encoding(self): + fn = os.path.join(TEST_DATA_DIR, "different_encoding.py") + self.check_file_refactoring(fn) + + def test_bom(self): + fn = os.path.join(TEST_DATA_DIR, "bom.py") + data = self.check_file_refactoring(fn) + self.assertTrue(data.startswith(codecs.BOM_UTF8)) + + def test_crlf_newlines(self): + old_sep = os.linesep + os.linesep = "\r\n" + try: + fn = os.path.join(TEST_DATA_DIR, "crlf.py") + fixes = refactor.get_fixers_from_package("lib2to3.fixes") + self.check_file_refactoring(fn, fixes) + finally: + os.linesep = old_sep + + def test_refactor_docstring(self): + rt = self.rt() + + doc = """ +>>> example() +42 +""" + out = rt.refactor_docstring(doc, "") + self.assertEqual(out, doc) + + doc = """ +>>> def parrot(): +... return 43 +""" + out = rt.refactor_docstring(doc, "") + self.assertNotEqual(out, doc) + + def test_explicit(self): + from myfixes.fix_explicit import FixExplicit + + rt = self.rt(fixers=["myfixes.fix_explicit"]) + self.assertEqual(len(rt.post_order), 0) + + rt = self.rt(explicit=["myfixes.fix_explicit"]) + for fix in rt.post_order: + if isinstance(fix, FixExplicit): + break + else: + self.fail("explicit fixer not loaded") diff --git a/lib3/2to3/lib2to3/tests/test_util.py b/lib3/2to3/lib2to3/tests/test_util.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/lib2to3/tests/test_util.py @@ -0,0 +1,594 @@ +""" Test suite for the code in fixer_util """ + +# Testing imports +from . import support + +# Python imports +import os.path + +# Local imports +from lib2to3.pytree import Node, Leaf +from lib2to3 import fixer_util +from lib2to3.fixer_util import Attr, Name, Call, Comma +from lib2to3.pgen2 import token + +def parse(code, strip_levels=0): + # The topmost node is file_input, which we don't care about. 
+ # The next-topmost node is a *_stmt node, which we also don't care about + tree = support.parse_string(code) + for i in range(strip_levels): + tree = tree.children[0] + tree.parent = None + return tree + +class MacroTestCase(support.TestCase): + def assertStr(self, node, string): + if isinstance(node, (tuple, list)): + node = Node(fixer_util.syms.simple_stmt, node) + self.assertEqual(str(node), string) + + +class Test_is_tuple(support.TestCase): + def is_tuple(self, string): + return fixer_util.is_tuple(parse(string, strip_levels=2)) + + def test_valid(self): + self.assertTrue(self.is_tuple("(a, b)")) + self.assertTrue(self.is_tuple("(a, (b, c))")) + self.assertTrue(self.is_tuple("((a, (b, c)),)")) + self.assertTrue(self.is_tuple("(a,)")) + self.assertTrue(self.is_tuple("()")) + + def test_invalid(self): + self.assertFalse(self.is_tuple("(a)")) + self.assertFalse(self.is_tuple("('foo') % (b, c)")) + + +class Test_is_list(support.TestCase): + def is_list(self, string): + return fixer_util.is_list(parse(string, strip_levels=2)) + + def test_valid(self): + self.assertTrue(self.is_list("[]")) + self.assertTrue(self.is_list("[a]")) + self.assertTrue(self.is_list("[a, b]")) + self.assertTrue(self.is_list("[a, [b, c]]")) + self.assertTrue(self.is_list("[[a, [b, c]],]")) + + def test_invalid(self): + self.assertFalse(self.is_list("[]+[]")) + + +class Test_Attr(MacroTestCase): + def test(self): + call = parse("foo()", strip_levels=2) + + self.assertStr(Attr(Name("a"), Name("b")), "a.b") + self.assertStr(Attr(call, Name("b")), "foo().b") + + def test_returns(self): + attr = Attr(Name("a"), Name("b")) + self.assertEqual(type(attr), list) + + +class Test_Name(MacroTestCase): + def test(self): + self.assertStr(Name("a"), "a") + self.assertStr(Name("foo.foo().bar"), "foo.foo().bar") + self.assertStr(Name("a", prefix="b"), "ba") + + +class Test_Call(MacroTestCase): + def _Call(self, name, args=None, prefix=None): + """Help the next test""" + children = [] + if isinstance(args, list): + for arg in args: + children.append(arg) + children.append(Comma()) + children.pop() + return Call(Name(name), children, prefix) + + def test(self): + kids = [None, + [Leaf(token.NUMBER, 1), Leaf(token.NUMBER, 2), + Leaf(token.NUMBER, 3)], + [Leaf(token.NUMBER, 1), Leaf(token.NUMBER, 3), + Leaf(token.NUMBER, 2), Leaf(token.NUMBER, 4)], + [Leaf(token.STRING, "b"), Leaf(token.STRING, "j", prefix=" ")] + ] + self.assertStr(self._Call("A"), "A()") + self.assertStr(self._Call("b", kids[1]), "b(1,2,3)") + self.assertStr(self._Call("a.b().c", kids[2]), "a.b().c(1,3,2,4)") + self.assertStr(self._Call("d", kids[3], prefix=" "), " d(b, j)") + + +class Test_does_tree_import(support.TestCase): + def _find_bind_rec(self, name, node): + # Search a tree for a binding -- used to find the starting + # point for these tests. 
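The Attr, Name and Call helpers checked above are small node factories from fixer_util; a minimal sketch of using them directly (len, items, obj and method are just placeholder names):

    # fixer_util's builders compose printable subtrees by hand.
    from lib2to3 import pytree
    from lib2to3.fixer_util import Attr, Call, Name, syms

    call = Call(Name("len"), [Name("items")])
    print(str(call))                                        # len(items)

    # Attr returns a list of nodes, so wrap it in a node to stringify it.
    stmt = pytree.Node(syms.simple_stmt, Attr(Name("obj"), Name("method")))
    print(str(stmt))                                        # obj.method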
+ c = fixer_util.find_binding(name, node) + if c: return c + for child in node.children: + c = self._find_bind_rec(name, child) + if c: return c + + def does_tree_import(self, package, name, string): + node = parse(string) + # Find the binding of start -- that's what we'll go from + node = self._find_bind_rec('start', node) + return fixer_util.does_tree_import(package, name, node) + + def try_with(self, string): + failing_tests = (("a", "a", "from a import b"), + ("a.d", "a", "from a.d import b"), + ("d.a", "a", "from d.a import b"), + (None, "a", "import b"), + (None, "a", "import b, c, d")) + for package, name, import_ in failing_tests: + n = self.does_tree_import(package, name, import_ + "\n" + string) + self.assertFalse(n) + n = self.does_tree_import(package, name, string + "\n" + import_) + self.assertFalse(n) + + passing_tests = (("a", "a", "from a import a"), + ("x", "a", "from x import a"), + ("x", "a", "from x import b, c, a, d"), + ("x.b", "a", "from x.b import a"), + ("x.b", "a", "from x.b import b, c, a, d"), + (None, "a", "import a"), + (None, "a", "import b, c, a, d")) + for package, name, import_ in passing_tests: + n = self.does_tree_import(package, name, import_ + "\n" + string) + self.assertTrue(n) + n = self.does_tree_import(package, name, string + "\n" + import_) + self.assertTrue(n) + + def test_in_function(self): + self.try_with("def foo():\n\tbar.baz()\n\tstart=3") + +class Test_find_binding(support.TestCase): + def find_binding(self, name, string, package=None): + return fixer_util.find_binding(name, parse(string), package) + + def test_simple_assignment(self): + self.assertTrue(self.find_binding("a", "a = b")) + self.assertTrue(self.find_binding("a", "a = [b, c, d]")) + self.assertTrue(self.find_binding("a", "a = foo()")) + self.assertTrue(self.find_binding("a", "a = foo().foo.foo[6][foo]")) + self.assertFalse(self.find_binding("a", "foo = a")) + self.assertFalse(self.find_binding("a", "foo = (a, b, c)")) + + def test_tuple_assignment(self): + self.assertTrue(self.find_binding("a", "(a,) = b")) + self.assertTrue(self.find_binding("a", "(a, b, c) = [b, c, d]")) + self.assertTrue(self.find_binding("a", "(c, (d, a), b) = foo()")) + self.assertTrue(self.find_binding("a", "(a, b) = foo().foo[6][foo]")) + self.assertFalse(self.find_binding("a", "(foo, b) = (b, a)")) + self.assertFalse(self.find_binding("a", "(foo, (b, c)) = (a, b, c)")) + + def test_list_assignment(self): + self.assertTrue(self.find_binding("a", "[a] = b")) + self.assertTrue(self.find_binding("a", "[a, b, c] = [b, c, d]")) + self.assertTrue(self.find_binding("a", "[c, [d, a], b] = foo()")) + self.assertTrue(self.find_binding("a", "[a, b] = foo().foo[a][foo]")) + self.assertFalse(self.find_binding("a", "[foo, b] = (b, a)")) + self.assertFalse(self.find_binding("a", "[foo, [b, c]] = (a, b, c)")) + + def test_invalid_assignments(self): + self.assertFalse(self.find_binding("a", "foo.a = 5")) + self.assertFalse(self.find_binding("a", "foo[a] = 5")) + self.assertFalse(self.find_binding("a", "foo(a) = 5")) + self.assertFalse(self.find_binding("a", "foo(a, b) = 5")) + + def test_simple_import(self): + self.assertTrue(self.find_binding("a", "import a")) + self.assertTrue(self.find_binding("a", "import b, c, a, d")) + self.assertFalse(self.find_binding("a", "import b")) + self.assertFalse(self.find_binding("a", "import b, c, d")) + + def test_from_import(self): + self.assertTrue(self.find_binding("a", "from x import a")) + self.assertTrue(self.find_binding("a", "from a import a")) + 
self.assertTrue(self.find_binding("a", "from x import b, c, a, d")) + self.assertTrue(self.find_binding("a", "from x.b import a")) + self.assertTrue(self.find_binding("a", "from x.b import b, c, a, d")) + self.assertFalse(self.find_binding("a", "from a import b")) + self.assertFalse(self.find_binding("a", "from a.d import b")) + self.assertFalse(self.find_binding("a", "from d.a import b")) + + def test_import_as(self): + self.assertTrue(self.find_binding("a", "import b as a")) + self.assertTrue(self.find_binding("a", "import b as a, c, a as f, d")) + self.assertFalse(self.find_binding("a", "import a as f")) + self.assertFalse(self.find_binding("a", "import b, c as f, d as e")) + + def test_from_import_as(self): + self.assertTrue(self.find_binding("a", "from x import b as a")) + self.assertTrue(self.find_binding("a", "from x import g as a, d as b")) + self.assertTrue(self.find_binding("a", "from x.b import t as a")) + self.assertTrue(self.find_binding("a", "from x.b import g as a, d")) + self.assertFalse(self.find_binding("a", "from a import b as t")) + self.assertFalse(self.find_binding("a", "from a.d import b as t")) + self.assertFalse(self.find_binding("a", "from d.a import b as t")) + + def test_simple_import_with_package(self): + self.assertTrue(self.find_binding("b", "import b")) + self.assertTrue(self.find_binding("b", "import b, c, d")) + self.assertFalse(self.find_binding("b", "import b", "b")) + self.assertFalse(self.find_binding("b", "import b, c, d", "c")) + + def test_from_import_with_package(self): + self.assertTrue(self.find_binding("a", "from x import a", "x")) + self.assertTrue(self.find_binding("a", "from a import a", "a")) + self.assertTrue(self.find_binding("a", "from x import *", "x")) + self.assertTrue(self.find_binding("a", "from x import b, c, a, d", "x")) + self.assertTrue(self.find_binding("a", "from x.b import a", "x.b")) + self.assertTrue(self.find_binding("a", "from x.b import *", "x.b")) + self.assertTrue(self.find_binding("a", "from x.b import b, c, a, d", "x.b")) + self.assertFalse(self.find_binding("a", "from a import b", "a")) + self.assertFalse(self.find_binding("a", "from a.d import b", "a.d")) + self.assertFalse(self.find_binding("a", "from d.a import b", "a.d")) + self.assertFalse(self.find_binding("a", "from x.y import *", "a.b")) + + def test_import_as_with_package(self): + self.assertFalse(self.find_binding("a", "import b.c as a", "b.c")) + self.assertFalse(self.find_binding("a", "import a as f", "f")) + self.assertFalse(self.find_binding("a", "import a as f", "a")) + + def test_from_import_as_with_package(self): + # Because it would take a lot of special-case code in the fixers + # to deal with from foo import bar as baz, we'll simply always + # fail if there is an "from ... import ... as ..." 
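The binding and import cases above can also be checked interactively; a minimal sketch, assuming lib2to3 is importable (the module and function names below are placeholders):

    # find_binding locates the node that binds a name; touch_import adds an import.
    from lib2to3 import fixer_util, pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)

    tree = d.parse_string("from x import b, c, a, d\n")
    print(fixer_util.find_binding("a", tree) is not None)   # True: "a" is bound

    tree = d.parse_string('"""doc"""\nbar()\n')
    fixer_util.touch_import(None, "foo", tree)               # inserts "import foo"
    print(str(tree))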
+ self.assertFalse(self.find_binding("a", "from x import b as a", "x")) + self.assertFalse(self.find_binding("a", "from x import g as a, d as b", "x")) + self.assertFalse(self.find_binding("a", "from x.b import t as a", "x.b")) + self.assertFalse(self.find_binding("a", "from x.b import g as a, d", "x.b")) + self.assertFalse(self.find_binding("a", "from a import b as t", "a")) + self.assertFalse(self.find_binding("a", "from a import b as t", "b")) + self.assertFalse(self.find_binding("a", "from a import b as t", "t")) + + def test_function_def(self): + self.assertTrue(self.find_binding("a", "def a(): pass")) + self.assertTrue(self.find_binding("a", "def a(b, c, d): pass")) + self.assertTrue(self.find_binding("a", "def a(): b = 7")) + self.assertFalse(self.find_binding("a", "def d(b, (c, a), e): pass")) + self.assertFalse(self.find_binding("a", "def d(a=7): pass")) + self.assertFalse(self.find_binding("a", "def d(a): pass")) + self.assertFalse(self.find_binding("a", "def d(): a = 7")) + + s = """ + def d(): + def a(): + pass""" + self.assertFalse(self.find_binding("a", s)) + + def test_class_def(self): + self.assertTrue(self.find_binding("a", "class a: pass")) + self.assertTrue(self.find_binding("a", "class a(): pass")) + self.assertTrue(self.find_binding("a", "class a(b): pass")) + self.assertTrue(self.find_binding("a", "class a(b, c=8): pass")) + self.assertFalse(self.find_binding("a", "class d: pass")) + self.assertFalse(self.find_binding("a", "class d(a): pass")) + self.assertFalse(self.find_binding("a", "class d(b, a=7): pass")) + self.assertFalse(self.find_binding("a", "class d(b, *a): pass")) + self.assertFalse(self.find_binding("a", "class d(b, **a): pass")) + self.assertFalse(self.find_binding("a", "class d: a = 7")) + + s = """ + class d(): + class a(): + pass""" + self.assertFalse(self.find_binding("a", s)) + + def test_for(self): + self.assertTrue(self.find_binding("a", "for a in r: pass")) + self.assertTrue(self.find_binding("a", "for a, b in r: pass")) + self.assertTrue(self.find_binding("a", "for (a, b) in r: pass")) + self.assertTrue(self.find_binding("a", "for c, (a,) in r: pass")) + self.assertTrue(self.find_binding("a", "for c, (a, b) in r: pass")) + self.assertTrue(self.find_binding("a", "for c in r: a = c")) + self.assertFalse(self.find_binding("a", "for c in a: pass")) + + def test_for_nested(self): + s = """ + for b in r: + for a in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for a, c in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for (a, c) in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for (a,) in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for c, (a, d) in b: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for c in b: + a = 7""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + for b in r: + for c in b: + d = a""" + self.assertFalse(self.find_binding("a", s)) + + s = """ + for b in r: + for c in a: + d = 7""" + self.assertFalse(self.find_binding("a", s)) + + def test_if(self): + self.assertTrue(self.find_binding("a", "if b in r: a = c")) + self.assertFalse(self.find_binding("a", "if a in r: d = e")) + + def test_if_nested(self): + s = """ + if b in r: + if c in d: + a = c""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + if b in r: + if c in d: + c = a""" + self.assertFalse(self.find_binding("a", s)) + + def test_while(self): + 
self.assertTrue(self.find_binding("a", "while b in r: a = c")) + self.assertFalse(self.find_binding("a", "while a in r: d = e")) + + def test_while_nested(self): + s = """ + while b in r: + while c in d: + a = c""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + while b in r: + while c in d: + c = a""" + self.assertFalse(self.find_binding("a", s)) + + def test_try_except(self): + s = """ + try: + a = 6 + except: + b = 8""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except KeyError: + pass + except: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + b = 6""" + self.assertFalse(self.find_binding("a", s)) + + def test_try_except_nested(self): + s = """ + try: + try: + a = 6 + except: + pass + except: + b = 8""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + try: + a = 6 + except: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + try: + pass + except: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + try: + b = 8 + except KeyError: + pass + except: + a = 6 + except: + pass""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + pass + except: + try: + b = 8 + except KeyError: + pass + except: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + b = 6""" + self.assertFalse(self.find_binding("a", s)) + + s = """ + try: + try: + b = 8 + except: + c = d + except: + try: + b = 6 + except: + t = 8 + except: + o = y""" + self.assertFalse(self.find_binding("a", s)) + + def test_try_except_finally(self): + s = """ + try: + c = 6 + except: + b = 8 + finally: + a = 9""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + finally: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + finally: + b = 6""" + self.assertFalse(self.find_binding("a", s)) + + s = """ + try: + b = 8 + except: + b = 9 + finally: + b = 6""" + self.assertFalse(self.find_binding("a", s)) + + def test_try_except_finally_nested(self): + s = """ + try: + c = 6 + except: + b = 8 + finally: + try: + a = 9 + except: + b = 9 + finally: + c = 9""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + finally: + try: + pass + finally: + a = 6""" + self.assertTrue(self.find_binding("a", s)) + + s = """ + try: + b = 8 + finally: + try: + b = 6 + finally: + b = 7""" + self.assertFalse(self.find_binding("a", s)) + +class Test_touch_import(support.TestCase): + + def test_after_docstring(self): + node = parse('"""foo"""\nbar()') + fixer_util.touch_import(None, "foo", node) + self.assertEqual(str(node), '"""foo"""\nimport foo\nbar()\n\n') + + def test_after_imports(self): + node = parse('"""foo"""\nimport bar\nbar()') + fixer_util.touch_import(None, "foo", node) + self.assertEqual(str(node), '"""foo"""\nimport bar\nimport foo\nbar()\n\n') + + def test_beginning(self): + node = parse('bar()') + fixer_util.touch_import(None, "foo", node) + self.assertEqual(str(node), 'import foo\nbar()\n\n') + + def test_from_import(self): + node = parse('bar()') + fixer_util.touch_import("html", "escape", node) + self.assertEqual(str(node), 'from html import escape\nbar()\n\n') + + def test_name_import(self): + node = parse('bar()') + fixer_util.touch_import(None, "cgi", node) + self.assertEqual(str(node), 'import cgi\nbar()\n\n') + +class 
Test_find_indentation(support.TestCase): + + def test_nothing(self): + fi = fixer_util.find_indentation + node = parse("node()") + self.assertEqual(fi(node), "") + node = parse("") + self.assertEqual(fi(node), "") + + def test_simple(self): + fi = fixer_util.find_indentation + node = parse("def f():\n x()") + self.assertEqual(fi(node), "") + self.assertEqual(fi(node.children[0].children[4].children[2]), " ") + node = parse("def f():\n x()\n y()") + self.assertEqual(fi(node.children[0].children[4].children[4]), " ") diff --git a/lib3/2to3/scripts/benchmark.py b/lib3/2to3/scripts/benchmark.py new file mode 100644 --- /dev/null +++ b/lib3/2to3/scripts/benchmark.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python2.5 +""" +This is a benchmarking script to test the speed of 2to3's pattern matching +system. It's equivalent to "refactor.py -f all" for every Python module +in sys.modules, but without engaging the actual transformations. +""" + +__author__ = "Collin Winter " + +# Python imports +import os.path +import sys +from time import time + +# Test imports +from .support import adjust_path +adjust_path() + +# Local imports +from .. import refactor + +### Mock code for refactor.py and the fixers +############################################################################### +class Options: + def __init__(self, **kwargs): + for k, v in list(kwargs.items()): + setattr(self, k, v) + + self.verbose = False + +def dummy_transform(*args, **kwargs): + pass + +### Collect list of modules to match against +############################################################################### +files = [] +for mod in list(sys.modules.values()): + if mod is None or not hasattr(mod, '__file__'): + continue + f = mod.__file__ + if f.endswith('.pyc'): + f = f[:-1] + if f.endswith('.py'): + files.append(f) + +### Set up refactor and run the benchmark +############################################################################### +options = Options(fix=["all"], print_function=False, doctests_only=False) +refactor = refactor.RefactoringTool(options) +for fixer in refactor.fixers: + # We don't want them to actually fix the tree, just match against it. + fixer.transform = dummy_transform + +t = time() +for f in files: + print("Matching", f) + refactor.refactor_file(f) +print("%d seconds to match %d files" % (time() - t, len(sys.modules))) diff --git a/lib3/2to3/scripts/find_pattern.py b/lib3/2to3/scripts/find_pattern.py new file mode 100755 --- /dev/null +++ b/lib3/2to3/scripts/find_pattern.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python + +"""Script that makes determining PATTERN for a new fix much easier. + +Figuring out exactly what PATTERN I want for a given fixer class is +getting tedious. This script will step through each possible subtree +for a given string, allowing you to select which one you want. It will +then try to figure out an appropriate pattern to match that tree. This +pattern will require some editing (it will be overly restrictive) but +should provide a solid base to work with and handle the tricky parts. + +Usage: + + python find_pattern.py "g.throw(E, V, T)" + +This will step through each subtree in the parse. To reject a +candidate subtree, hit enter; to accept a candidate, hit "y" and +enter. The pattern will be spit out to stdout. + +For example, the above will yield a succession of possible snippets, +skipping all leaf-only trees. I accept + +'g.throw(E, V, T)' + +This causes find_pattern to spit out + +power< 'g' trailer< '.' 
'throw' > + trailer< '(' arglist< 'E' ',' 'V' ',' 'T' > ')' > > + + +Some minor tweaks later, I'm left with + +power< any trailer< '.' 'throw' > + trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' > > + +which is exactly what I was after. + +Larger snippets can be placed in a file (as opposed to a command-line +arg) and processed with the -f option. +""" + +__author__ = "Collin Winter " + +# Python imports +import optparse +import sys +from io import StringIO + +# Local imports +from lib2to3 import pytree +from lib2to3.pgen2 import driver +from lib2to3.pygram import python_symbols, python_grammar + +driver = driver.Driver(python_grammar, convert=pytree.convert) + +def main(args): + parser = optparse.OptionParser(usage="find_pattern.py [options] [string]") + parser.add_option("-f", "--file", action="store", + help="Read a code snippet from the specified file") + + # Parse command line arguments + options, args = parser.parse_args(args) + if options.file: + tree = driver.parse_file(options.file) + elif len(args) > 1: + tree = driver.parse_stream(StringIO(args[1] + "\n")) + else: + print("You must specify an input file or an input string", file=sys.stderr) + return 1 + + examine_tree(tree) + return 0 + +def examine_tree(tree): + for node in tree.post_order(): + if isinstance(node, pytree.Leaf): + continue + print(repr(str(node))) + verdict = input() + if verdict.strip(): + print(find_pattern(node)) + return + +def find_pattern(node): + if isinstance(node, pytree.Leaf): + return repr(node.value) + + return find_symbol(node.type) + \ + "< " + " ".join(find_pattern(n) for n in node.children) + " >" + +def find_symbol(sym): + for n, v in list(python_symbols.__dict__.items()): + if v == sym: + return n + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/lib3/2to3/test.py b/lib3/2to3/test.py new file mode 100755 --- /dev/null +++ b/lib3/2to3/test.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +"""Main test file for 2to3. + +Running "python test.py" will run all tests in tests/test_*.py. +""" +# Author: Collin Winter + +import unittest +from lib2to3 import tests +import lib2to3.tests.support +from sys import exit, argv + +if "-h" in argv or "--help" in argv or len(argv) > 2: + print("Usage: %s [-h] [test suite[.test class]]" %(argv[0])) + print("default : run all tests in lib2to3/tests/test_*.py") + print("test suite: run tests in lib2to3/tests/") + print("test class : run tests in .") + exit(1) + +if len(argv) == 2: + mod = tests + for m in argv[1].split("."): + mod = getattr(mod, m, None) + if not mod: + print("Error importing %s" %(m)) + exit(1) + + if argv[1].find(".") == -1: + # Just the module was specified, load all the tests + suite = unittest.TestLoader().loadTestsFromModule(mod) + else: + # A class was specified, load that + suite = unittest.makeSuite(mod) +else: + suite = tests.all_tests + +try: + tests.support.run_all_tests(tests=suite) +except KeyboardInterrupt: + pass diff --git a/lib3/Chameleon-2.9.2/.gitignore b/lib3/Chameleon-2.9.2/.gitignore new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/.gitignore @@ -0,0 +1,12 @@ +*.pyc +*.egg +*.egg-info +.coverage +.tox/ +coverage.xml +nosetests.xml +*.tar.gz +env25/ +env26/ +env27/ +env32/ diff --git a/lib3/Chameleon-2.9.2/CHANGES.rst b/lib3/Chameleon-2.9.2/CHANGES.rst new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/CHANGES.rst @@ -0,0 +1,1075 @@ +Changes +======= + +2.9.2 (2012-06-06) +------------------ + +Bugfixes: + +- Fixed a PyPy incompatibility. 
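The find_pattern walkthrough above ends with a hand-tuned PATTERN; a minimal sketch of compiling a slightly simplified variant of that pattern and matching it against a parse tree, assuming a stock lib2to3 install:

    # Compile a fixer-style PATTERN and look for a match in a small tree.
    from lib2to3 import pygram, pytree
    from lib2to3.patcomp import PatternCompiler
    from lib2to3.pgen2 import driver

    pattern = PatternCompiler().compile_pattern(
        "power< any trailer< '.' 'throw' > trailer< '(' args=any ')' > >")
    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("g.throw(E, V, T)\n")
    for node in tree.pre_order():
        results = {}
        if pattern.match(node, results):
            print("matched:", str(node), "args =", str(results["args"]))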
+ +- Fixed issue #109 which caused testing failures on some platforms. + +2.9.1 (2012-06-01) +------------------ + +Bugfixes: + +- Fixed issue #103. The ``tal:on-error`` statement now always adds an + explicit end-tag to the element, even with a substitution content of + nothing. + +- Fixed issue #113. The ``tal:on-error`` statement now works correctly + also for dynamic attributes. That is, the fallback tag now includes + only static attributes. + +- Fixed name error which prevented the benchmark from running + correctly. + +Compatibility: + +- Fixed deprecation warning on Python 3 for zope interface implements + declaration. This fixes issue #116. + +2.9.0 (2012-05-31) +------------------ + +Features: + +- The translation function now gets the ``econtext`` argument as the + value for ``context``. Note that historically, this was usually an + HTTP request which might provide language negotiation data through a + dictionary interface. + [alvinyue] + +Bugfixes: + +- Fixed import alias issue which would lead to a syntax error in + generated Python code. Fixes issue #114. + +2.8.5 (2012-05-02) +------------------ + +Bugfixes: + +- Fixed minor installation issues on Python 2.5 and 3. + [ppaez] + +- Ensure output is unicode even when trivial (an empty string). + +2.8.4 (2012-04-18) +------------------ + +Features: + +- In exception output, long filenames are now truncated to 60 + characters of output, preventing line wrap which makes it difficult + to scan the exception output. + +Bugfixes: + +- Include filename and location in exception output for exceptions + raised during compilation. + +- If a trivial translation substitution variable is given (i.e. an + empty string), simply ignore it. This fixes issue #106. + +2.8.3 (2012-04-16) +------------------ + +Features: + +- Log template source on debug-level before cooking. + +- The `target_language` argument, if given, is now available as a + variable in templates. + +2.8.2 (2012-03-30) +------------------ + +Features: + +- Temporary caches used in debug mode are cleaned up eagerly, rather + than waiting for process termination. + [mitchellrj] + +Bugfixes: + +- The `index`, `start` and `end` methods on the TAL repeat object are + now callable. This fixes an incompatibility with ZPT. + +- The loader now correctly handles absolute paths on Windows. + [rdale] + +2.8.1 (2012-03-29) +------------------ + +Features: + +- The exception formatter now lists errors in 'wrapping order'. This + means that the innermost, and presumably most relevant exception is + shown last. + +Bugfixes: + +- The exception formatter now correctly recognizes nested errors and + does not rewrap the dynamically generated exception class. + +- The exception formatter now correctly sets the ``__module__`` + attribute to that of the original exception class. + +2.8.0 (2012-02-29) +------------------ + +Features: + +- Added support for code blocks using the `` processing + instruction syntax. + + The scope is name assignments is up until the nearest macro + definition, or the template itself if macros are not used. + +Bugfixes: + +- Fall back to the exception class' ``__new__`` method to safely + create an exception object that is not implemented in Python. + +- The exception formatter now keeps track of already formatted + exceptions, and ignores them from further output. + +2.7.4 (2012-02-27) +------------------ + +- The error handler now invokes the ``__init__`` method of + ``BaseException`` instead of the possibly overriden method (which + may take required arguments). 
This fixes issue #97. + [j23d, malthe] + +2.7.3 (2012-01-16) +------------------ + +Bugfixes: + +- The trim whitespace option now correctly trims actual whitespace to + a single character, appearing either to the left or to the right of + an element prefix or suffix string. + +2.7.2 (2012-01-08) +------------------ + +Features: + +- Added option ``trim_attribute_space`` that decides whether attribute + whitespace is stripped (at most down to a single space). This option + exists to provide compatibility with the reference + implementation. Fixes issue #85. + +Bugfixes: + +- Ignore unhashable builtins when generating a reverse builtin + map to quickly look up a builtin value. + [malthe] + +- Apply translation mapping even when a translation function is not + available. This fixes issue #83. + [malthe] + +- Fixed issue #80. The translation domain for a slot is defined by the + source document, i.e. the template providing the content for a slot + whether it be the default or provided through ``metal:fill-slot``. + [jcbrand] + +- In certain circumstances, a Unicode non-breaking space character would cause + a define clause to fail to parse. + +2.7.1 (2011-12-29) +------------------ + +Features: + +- Enable expression interpolation in CDATA. + +- The page template class now implements dictionary access to macros:: + + template[name] + + This is a short-hand for:: + + template.macros[name] + +Bugfixes: + +- An invalid define clause would be silently ignored; we now raise a + language error exception. This fixes issue #79. + +- Fixed regression where ``${...}`` interpolation expressions could + not span multiple lines. This fixes issue #77. + +2.7.0 (2011-12-13) +------------------ + +Features: + +- The ``load:`` expression now derives from the string expression such + that the ``${...}`` operator can be used for expression + interpolation. + +- The ``load:`` expression now accepts asset specs; these are resolved + by the ``pkg_resources.resource_filename`` function:: + + : + + An example from the test suite:: + + chameleon:tests/inputs/hello_world.pt + +Bugfixes: + +- If an attribute name for translation was not a valid Python + identifier, the compiler would generate invalid code. This has been + fixed, and the compiler now also throws an exception if an attribute + specification contains a comma. (Note that the only valid separator + character is the semicolon, when specifying attributes for + translation via the ``i18n:translate`` statement). This addresses + issue #76. + +2.6.2 (2011-12-08) +------------------ + +Bugfixes: + +- Fixed issue where ``tal:on-error`` would not respect + ``tal:omit-tag`` or namespace elements which are omitted by default + (such as ````). + +- Fixed issue where ``macros`` attribute would not be available on + file-based templates due to incorrect initialization. + +- The ``TryExcept`` and ``TryFinally`` AST nodes are not available on + Python 3.3. These have been aliased to ``Try``. This fixes issue + #75. + +Features: + +- The TAL repeat item now makes a security declaration that grants + access to unprotected subobjects on the Zope 2 platform:: + + __allow_access_to_unprotected_subobjects__ = True + + This is required for legacy compatibility and does not affect other + environments. + +- The template object now has a method ``write(body)`` which + explicitly decodes and cooks a string input. + +- Added configuration option ``loader_class`` which sets the class + used to create the template loader object. 
+ + The class (essentially a callable) is created at template + construction time. + +2.6.1 (2011-11-30) +------------------ + +Bugfixes: + +- Decode HTML entities in expression interpolation strings. This fixes + issue #74. + +- Allow ``xml`` and ``xmlns`` attributes on TAL, I18N and METAL + namespace elements. This fixes issue #73. + +2.6.0 (2011-11-24) +------------------ + +Features: + +- Added support for implicit translation: + + The ``implicit_i18n_translate`` option enables implicit translation + of text. The ``implicit_i18n_attributes`` enables implicit + translation of attributes. The latter must be a set and for an + attribute to be implicitly translated, its lowercase string value + must be included in the set. + +- Added option ``strict`` (enabled by default) which decides whether + expressions are required to be valid at compile time. That is, if + not set, an exception is only raised for an invalid expression at + evaluation time. + +- An expression error now results in an exception only if the + expression is attempted evaluated during a rendering. + +- Added a configuration option ``prepend_relative_search_path`` which + decides whether the path relative to a file-based template is + prepended to the load search path. The default is ``True``. + +- Added a configuration option ``search_path`` to the file-based + template class, which adds additional paths to the template load + instance bound to the ``load:`` expression. The option takes a + string path or an iterable yielding string paths. The default value + is the empty set. + +Bugfixes: + +- Exception instances now support pickle/unpickle. + +- An attributes in i18n:attributes no longer needs to match an + existing or dynamic attribute in order to appear in the + element. This fixes issue #66. + +2.5.3 (2011-10-23) +------------------ + +Bugfixes: + +- Fixed an issue where a nested macro slot definition would fail even + though there existed a parent macro definition. This fixes issue + #69. + +2.5.2 (2011-10-12) +------------------ + +Bugfixes: + +- Fixed an issue where technically invalid input would result in a + compiler error. + +Features: + +- The markup class now inherits from the unicode string type such that + it's compatible with the string interface. + +2.5.1 (2011-09-29) +------------------ + +Bugfixes: + +- The symbol names "convert", "decode" and "translate" are now no + longer set as read-only *compiler internals*. This fixes issue #65. + +- Fixed an issue where a macro extension chain nested two levels (a + template uses a macro that extends a macro) would lose the middle + slot definitions if slots were defined nested. + + The compiler now throws an error if a nested slot definition is used + outside a macro extension context. + +2.5.0 (2011-09-23) +------------------ + +Features: + +- An expression type ``structure:`` is now available which wraps the + expression result as *structure* such that it is not escaped on + insertion, e.g.:: + +
+ ${structure: context.body} +
+ + This also means that the ``structure`` keyword for ``tal:content`` + and ``tal:replace`` now has an alternative spelling via the + expression type ``structure:``. + +- The string-based template constructor now accepts encoded input. + +2.4.6 (2011-09-23) +------------------ + +Bugfixes: + +- The ``tal:on-error`` statement should catch all exceptions. + +- Fixed issue that would prevent escaping of interpolation expression + values appearing in text. + +2.4.5 (2011-09-21) +------------------ + +Bugfixes: + +- The ``tal:on-error`` handler should have a ``error`` variable + defined that has the value of the exception thrown. + +- The ``tal:on-error`` statement is a substitution statement and + should support the "text" and "structure" insertion methods. + +2.4.4 (2011-09-15) +------------------ + +Bugfixes: + +- An encoding specified in the XML document preamble is now read and + used to decode the template input to unicode. This fixes issue #55. + +- Encoded expression input on Python 3 is now correctly + decoded. Previously, the string representation output would be + included instead of an actually decoded string. + +- Expression result conversion steps are now correctly included in + error handling such that the exception output points to the + expression location. + +2.4.3 (2011-09-13) +------------------ + +Features: + +- When an encoding is provided, pass the 'ignore' flag to avoid + decoding issues with bad input. + +Bugfixes: + +- Fixed pypy compatibility issue (introduced in previous release). + +2.4.2 (2011-09-13) +------------------ + +Bugfixes: + +- Fixed an issue in the compiler where an internal variable (such as a + translation default value) would be cached, resulting in variable + scope corruption (see issue #49). + +2.4.1 (2011-09-08) +------------------ + +Bugfixes: + +- Fixed an issue where a default value for an attribute would + sometimes spill over into another attribute. + +- Fixed issue where the use of the ``default`` name in an attribute + interpolation expression would print the attribute value. This is + unexpected, because it's an expression, not a static text suitable + for output. An attribute value of ``default`` now correctly drops + the attribute. + +2.4.0 (2011-08-22) +------------------ + +Features: + +- Added an option ``boolean_attributes`` to evaluate and render a + provided set of attributes using a boolean logic: if the attribute + is a true value, the value will be the attribute name, otherwise the + attribute is dropped. + + In the reference implementation, the following attributes are + configured as boolean values when the template is rendered in + HTML-mode:: + + "compact", "nowrap", "ismap", "declare", "noshade", + "checked", "disabled", "readonly", "multiple", "selected", + "noresize", "defer" + + Note that in Chameleon, these attributes must be manually provided. + +Bugfixes: + +- The carriage return character (used on Windows platforms) would + incorrectly be included in Python comments. + + It is now replaced with a line break. + + This fixes issue #44. + +2.3.8 (2011-08-19) +------------------ + +- Fixed import error that affected Python 2.5 only. + +2.3.7 (2011-08-19) +------------------ + +Features: + +- Added an option ``literal_false`` that disables the default behavior + of dropping an attribute for a value of ``False`` (in addition to + ``None``). This modified behavior is the behavior exhibited in + reference implementation. + +Bugfixes: + +- Undo attribute special HTML attribute behavior (see previous + release). 
+ + This turned out not to be a compatible behavior; rather, boolean + values should simply be coerced to a string. + + Meanwhile, the reference implementation does support an HTML mode in + which the special attribute behavior is exhibited. + + We do not currently support this mode. + +2.3.6 (2011-08-18) +------------------ + +Features: + +- Certain HTML attribute names now have a special behavior for a + attribute value of ``True`` (or ``default`` if no default is + defined). For these attributes, this return value will result in the + name being printed as the value:: + + + + will be rendered as:: + + + + This behavior is compatible with the reference implementation. + +2.3.5 (2011-08-18) +------------------ + +Features: + +- Added support for the set operator (``{item, item, ...}``). + +Bugfixes: + +- If macro is defined on the same element as a translation name, this + no longer results in a "translation name not allowed outside + translation" error. This fixes issue #43. + +- Attribute fallback to dictionary lookup now works on multiple items + (e.g. ``d1.d2.d2``). This fixes issue #42. + +2.3.4 (2011-08-16) +------------------ + +Features: + +- When inserting content in either attributes or text, a value of + ``True`` (like ``False`` and ``None``) will result in no + action. + +- Use statically assigned variables for ``"attrs"`` and + ``"default"``. This change yields a performance improvement of + 15-20%. + +- The template loader class now accepts an optional argument + ``default_extension`` which accepts a filename extension which will + be appended to the filename if there's not already an extension. + +Bugfixes: + +- The default symbol is now ``True`` for an attribute if the attribute + default is not provided. Note that the result is that the attribute + is dropped. This fixes issue #41. + +- Fixed an issue where assignment to a variable ``"type"`` would + fail. This fixes issue #40. + +- Fixed an issue where an (unsuccesful) assignment for a repeat loop + to a compiler internal name would not result in an error. + +- If the translation function returns the identical object, manually + coerce it to string. This fixes a compatibility issue with + translation functions which do not convert non-string objects to a + string value, but simply return them unchanged. + +2.3.3 (2011-08-15) +------------------ + +Features: + +- The ``load:`` expression now passes the initial keyword arguments to + its template loader (e.g. ``auto_reload`` and ``encoding``). + +- In the exception output, string variable values are now limited to a + limited output of characters, single line only. + +Bugfixes: + +- Fixed horizontal alignment of exception location info + (i.e. 'String:', 'Filename:' and 'Location:') such that they match + the template exception formatter. + +2.3.2 (2011-08-11) +------------------ + +Bugfixes: + +- Fixed issue where i18n:domain would not be inherited through macros + and slots. This fixes issue #37. + +2.3.1 (2011-08-11) +------------------ + +Features: + +- The ``Builtin`` node type may now be used to represent any Python + local or global name. This allows expression compilers to refer to + e.g. ``get`` or ``getitem``, or to explicit require a builtin object + such as one from the ``extra_builtins`` dictionary. + +Bugfixes: + +- Builtins which are not explicitly disallowed may now be redefined + and used as variables (e.g. ``nothing``). + +- Fixed compiler issue with circular node annotation loop. 
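The ``default_extension`` loader argument described in the 2.3.4 entry above is easiest to see in a short sketch. The following is a minimal, hedged example rather than an excerpt from the package documentation; it assumes the loader is importable as ``chameleon.PageTemplateLoader`` and that a hypothetical ``./templates/hello.pt`` file exists::

    # Minimal sketch of the 2.3.4 loader behaviour; the directory layout
    # and the file name ("./templates/hello.pt") are hypothetical.
    from chameleon import PageTemplateLoader

    # With default_extension set, the key "hello" resolves to "hello.pt".
    templates = PageTemplateLoader("./templates", default_extension=".pt")
    template = templates["hello"]

    # Page templates are callable; keyword arguments become template variables.
    print(template(name="World"))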
+ +2.3 (2011-08-10) +---------------- + +Features: + +- Added support for the following syntax to disable inline evaluation + in a comment: + + + + Note that the initial question mark character (?) will be omitted + from output. + +- The parser now accepts '<' and '>' in attributes. Note that this is + invalid markup. Previously, the '<' would not be accepted as a valid + attribute value, but this would result in an 'unexpected end tag' + error elsewhere. This fixes issue #38. + +- The expression compiler now provides methods ``assign_text`` and + ``assign_value`` such that a template engine might configure this + value conversion to support e.g. encoded strings. + + Note that currently, the only client for the ``assign_text`` method + is the string expression type. + +- Enable template loader for string-based template classes. Note that + the ``filename`` keyword argument may be provided on initialization + to identify the template source by filename. This fixes issue #36. + +- Added ``extra_builtins`` option to the page template class. These + builtins are added to the default builtins dictionary at cook time + and may be provided at initialization using the ``extra_builtins`` + keyword argument. + +Bugfixes: + +- If a translation domain is set for a fill slot, use this setting + instead of the macro template domain. + +- The Python expression compiler now correctly decodes HTML entities + ``'gt'`` and ``'lt'``. This fixes issue #32. + +- The string expression compiler now correctly handles encoded text + (when support for encoded strings is enabled). This fixes issue #35. + +- Fixed an issue where setting the ``filename`` attribute on a + file-based template would not automatically cause an invalidation. + +- Exceptions raised by Chameleon can now be copied via + ``copy.copy``. This fixes issue #36. + [leorochael] + +- If copying the exception fails in the exception handler, simply + re-raise the original exception and log a warning. + +2.2 (2011-07-28) +---------------- + +Features: + +- Added new expression type ``load:`` that allows loading a + template. Both relative and absolute paths are supported. If the + path given is relative, then it will be resolved with respect to the + directory of the template. + +- Added support for dynamic evaluation of expressions. + + Note that this is to support legacy applications. It is not + currently wired into the provided template classes. + +- Template classes now have a ``builtins`` attribute which may be used + to define built-in variables always available in the template + variable scope. + +Incompatibilities: + +- The file-based template class no longer accepts a parameter + ``loader``. This parameter would be used to load a template from a + relative path, using a ``find(filename)`` method. This was however, + undocumented, and probably not very useful since we have the + ``TemplateLoader`` mechanism already. + +- The compiled template module now contains an ``initialize`` function + which takes values that map to the template builtins. The return + value of this function is a dictionary that contains the render + functions. + +Bugfixes: + +- The file-based template class no longer verifies the existance of a + template file (using ``os.lstat``). This now happens implicitly if + eager parsing is enabled, or otherwise when first needed (e.g. at + render time). 
+ + This is classified as a bug fix because the previous behavior was + probably not what you'd expect, especially if an application + initializes a lot of templates without needing to render them + immediately. + +2.1.1 (2011-07-28) +------------------ + +Features: + +- Improved exception display. The expression string is now shown in + the context of the original source (if available) with a marker + string indicating the location of the expression in the template + source. + +Bugfixes: + +- The ``structure`` insertion mode now correctly decodes entities for + any expression type (including ``string:``). This fixes issue #30. + +- Don't show internal variables in the exception formatter variable + listing. + +2.1 (2011-07-25) +---------------- + +Features: + +- Expression interpolation (using the ``${...}`` operator and + previously also ``$identifier``) now requires braces everywhere + except inside the ``string:`` expression type. + + This change is motivated by a number of legacy templates in which + the interpolation format without braces ``$identifier`` appears as + text. + +2.0.2 (2011-07-25) +------------------ + +Bugfixes: + +- Don't use dynamic variable scope for lambda-scoped variables (#27). + +- Avoid duplication of exception class and message in traceback. + +- Fixed issue where a ``metal:fill-slot`` would be ignored if a macro + was set to be used on the same element (#16). + +2.0.1 (2011-07-23) +------------------ + +Bugfixes: + +- Fixed issue where global variable definition from macro slots would + fail (they would instead be local). This also affects error + reporting from inside slots because this would be recorded + internally as a global. + +- Fixed issue with template cache digest (used for filenames); modules + are now invalidated whenever any changes are made to the + distribution set available (packages on ``sys.path``). + +- Fixed exception handler to better let exceptions propagate through + the renderer. + +- The disk-based module compiler now mangles template source filenames + such that the output Python module is valid and at root level (dots + and hyphens are replaced by an underscore). This fixes issue #17. + +- Fixed translations (i18n) on Python 2.5. + +2.0 (2011-07-14) +---------------- + +- Point release. + +2.0-rc14 (2011-07-13) +--------------------- + +Bugfixes: + +- The tab character (``\t``) is now parsed correctly when used inside + tags. + +Features: + +- The ``RepeatDict`` class now works as a proxy behind a seperate + dictionary instance. + +- Added template constructor option ``keep_body`` which is a flag + (also available as a class attribute) that controls whether to save + the template body input in the ``body`` attribute. + + This is disabled by default, unless debug-mode is enabled. + +- The page template loader class now accepts an optional ``formats`` + argument which can be used to select an alternative template class. + +2.0-rc13 (2011-07-07) +--------------------- + +Bugfixes: + +- The backslash character (followed by optional whitespace and a line + break) was not correctly interpreted as a continuation for Python + expressions. + +Features: + +- The Python expression implementation is now more flexible for + external subclassing via a new ``parse`` method. + +2.0-rc12 (2011-07-04) +--------------------- + +Bugfixes: + +- Initial keyword arguments passed to a template now no longer "leak" + into the template variable space after a macro call. + +- An unexpected end tag is now an unrecoverable error. 
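The 2.1 entry above (expression interpolation now requires braces outside the ``string:`` expression type) can be illustrated with a small, hedged sketch; the template text is invented and ``PageTemplate`` is assumed to be importable from the ``chameleon`` package::

    # Sketch of the 2.1 rule: "$name" without braces is left as literal
    # text, while "${name}" is interpolated. The template body is invented.
    from chameleon import PageTemplate

    template = PageTemplate(
        "<p>Literal: $amount, interpolated: ${amount}</p>"
    )

    # Expected to render along the lines of:
    #   <p>Literal: $amount, interpolated: 42</p>
    print(template(amount=42))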
+ +Features: + +- Improve exception output. + +2.0-rc11 (2011-05-26) +--------------------- + +Bugfixes: + +- Fixed issue where variable names that begin with an underscore were + seemingly allowed, but their use resulted in a compiler error. + +Features: + +- Template variable names are now allowed to be prefixed with a single + underscore, but not two or more (reserved for internal use). + + Examples of valid names:: + + item + ITEM + _item + camelCase + underscore_delimited + help + +- Added support for Genshi's comment "drop" syntax:: + + + + Note the additional exclamation (!) character. + + This fixes addresses issue #10. + +2.0-rc10 (2011-05-24) +--------------------- + +Bugfixes: + +- The ``tal:attributes`` statement now correctly operates + case-insensitive. The attribute name given in the statement will + replace an existing attribute with the same name, without respect to + case. + +Features: + +- Added ``meta:interpolation`` statement to control expression + interpolation setting. + + Strings that disable the setting: ``"off"`` and ``"false"``. + Strings that enable the setting: ``"on"`` and ``"true"``. + +- Expression interpolation now works inside XML comments. + +2.0-rc9 (2011-05-05) +-------------------- + +Features: + +- Better debugging support for string decode and conversion. If a + naive join fails, each element in the output will now be attempted + coerced to unicode to try and trigger the failure near to the bad + string. + +2.0-rc8 (2011-04-11) +-------------------- + +Bugfixes: + +- If a macro defines two slots with the same name, a caller will now + fill both with a single usage. + +- If a valid of ``None`` is provided as the translation function + argument, we now fall back to the class default. + +2.0-rc7 (2011-03-29) +-------------------- + +Bugfixes: + +- Fixed issue with Python 2.5 compatibility AST. This affected at + least PyPy 1.4. + +Features: + +- The ``auto_reload`` setting now defaults to the class value; the + base template class gives a default value of + ``chameleon.config.AUTO_RELOAD``. This change allows a subclass to + provide a custom default value (such as an application-specific + debug mode setting). + + +2.0-rc6 (2011-03-19) +-------------------- + +Features: + +- Added support for ``target_language`` keyword argument to render + method. If provided, the argument will be curried onto the + translation function. + +Bugfixes: + +- The HTML entities 'lt', 'gt' and 'quot' appearing inside content + subtition expressions are now translated into their native character + values. This fixes an issue where you could not dynamically create + elements using the ``structure`` (which is possible in ZPT). The + need to create such structure stems from the lack of an expression + interpolation operator in ZPT. + +- Fixed duplicate file pointer issue with test suite (affected Windows + platforms only). This fixes issue #9. + [oliora] + +- Use already open file using ``os.fdopen`` when trying to write out + the module source. This fixes LP #731803. + + +2.0-rc5 (2011-03-07) +-------------------- + +Bugfixes: + +- Fixed a number of issues concerning the escaping of attribute + values: + + 1) Static attribute values are now included as they appear in the + source. + + This means that invalid attribute values such as ``"true && + false"`` are now left alone. It's not the job of the template + engine to correct such markup, at least not in the default mode + of operation. + + 2) The string expression compiler no longer unescapes + values. 
Instead, this is left to each expression + compiler. Currently only the Python expression compiler unescapes + its input. + + 3) The dynamic escape code sequence now correctly only replaces + ampersands that are part of an HTML escape format. + +Imports: + +- The page template classes and the loader class can now be imported + directly from the ``chameleon`` module. + +Features: + +- If a custom template loader is not provided, relative paths are now + resolved using ``os.abspath`` (i.e. to the current working + directory). + +- Absolute paths are normalized using ``os.path.normpath`` and + ``os.path.expanduser``. This ensures that all paths are kept in + their "canonical" form. + + +2.0-rc4 (2011-03-03) +-------------------- + +Bugfixes: + +- Fixed an issue where the output of an end-to-end string expression + would raise an exception if the expression evaluated to ``None`` (it + should simply output nothing). + +- The ``convert`` function (which is configurable on the template + class level) now defaults to the ``translate`` function (at + run-time). + + This fixes an issue where message objects were not translated (and + thus converted to a string) using the a provided ``translate`` + function. + +- Fixed string interpolation issue where an expression immediately + succeeded by a right curly bracket would not parse. + + This fixes issue #5. + +- Fixed error where ``tal:condition`` would be evaluated after + ``tal:repeat``. + +Features: + +- Python expression is now a TALES expression. That means that the + pipe operator can be used to chain two or more expressions in a + try-except sequence. + + This behavior was ported from the 1.x series. Note that while it's + still possible to use the pipe character ("|") in an expression, it + must now be escaped. + +- The template cache can now be shared by multiple processes. + + +2.0-rc3 (2011-03-02) +-------------------- + +Bugfixes: + +- Fixed ``atexit`` handler. + + This fixes issue #3. + +- If a cache directory is specified, it will now be used even when not + in debug mode. + +- Allow "comment" attribute in the TAL namespace. + + This fixes an issue in the sense that the reference engine allows + any attribute within the TAL namespace. However, only "comment" is + in common use. + +- The template constructor now accepts a flag ``debug`` which puts the + template *instance* into debug-mode regardless of the global + setting. + + This fixes issue #1. + +Features: + +- Added exception handler for exceptions raised while evaluating an + expression. + + This handler raises (or attempts to) a new exception of the type + ``RenderError``, with an additional base class of the original + exception class. The string value of the exception is a formatted + error message which includes the expression that caused the + exception. + + If we are unable to create the exception class, the original + exception is re-raised. + +2.0-rc2 (2011-02-28) +-------------------- + +- Fixed upload issue. + +2.0-rc1 (2011-02-28) +-------------------- + +- Initial public release. See documentation for what's new in this + series. diff --git a/lib3/Chameleon-2.9.2/COPYRIGHT.txt b/lib3/Chameleon-2.9.2/COPYRIGHT.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/COPYRIGHT.txt @@ -0,0 +1,7 @@ +Copyright (c) 2011 Malthe Borch and Contributors. All Rights Reserved. + +Portions (c) Zope Foundation and contributors (http://www.zope.org/). + +Portions (c) Edgewall Software. + +Portions (c) 2008 Armin Ronacher. 
diff --git a/lib3/Chameleon-2.9.2/LICENSE.txt b/lib3/Chameleon-2.9.2/LICENSE.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/LICENSE.txt @@ -0,0 +1,185 @@ +The majority of the code in Chameleon is supplied under this license: + + A copyright notice accompanies this license document that identifies + the copyright holders. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + 1. Redistributions in source code must retain the accompanying + copyright notice, this list of conditions, and the following + disclaimer. + + 2. Redistributions in binary form must reproduce the accompanying + copyright notice, this list of conditions, and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + 3. Names of the copyright holders must not be used to endorse or + promote products derived from this software without prior + written permission from the copyright holders. + + 4. If any files are modified, you must cause the modified files to + carry prominent notices stating that you changed the files and + the date of any change. + + Disclaimer + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND + ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + +Portions of the code in Chameleon are supplied under the ZPL (headers +within individiual files indicate that these portions are licensed +under the ZPL): + + Zope Public License (ZPL) Version 2.1 + ------------------------------------- + + A copyright notice accompanies this license document that + identifies the copyright holders. + + This license has been certified as open source. It has also + been designated as GPL compatible by the Free Software + Foundation (FSF). + + Redistribution and use in source and binary forms, with or + without modification, are permitted provided that the + following conditions are met: + + 1. Redistributions in source code must retain the + accompanying copyright notice, this list of conditions, + and the following disclaimer. + + 2. Redistributions in binary form must reproduce the accompanying + copyright notice, this list of conditions, and the + following disclaimer in the documentation and/or other + materials provided with the distribution. + + 3. Names of the copyright holders must not be used to + endorse or promote products derived from this software + without prior written permission from the copyright + holders. + + 4. The right to distribute this software or to use it for + any purpose does not give you the right to use + Servicemarks (sm) or Trademarks (tm) of the copyright + holders. Use of them is covered by separate agreement + with the copyright holders. + + 5. 
If any files are modified, you must cause the modified + files to carry prominent notices stating that you changed + the files and the date of any change. + + Disclaimer + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' + AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT + NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN + NO EVENT SHALL THE COPYRIGHT HOLDERS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH + DAMAGE. + +Portions of the code in Chameleon are supplied under the BSD license +(headers within individiual files indicate that these portions are +licensed under this license): + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + 3. The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS + OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE + GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER + IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Portions of the code in Chameleon are supplied under the Python +License (headers within individiual files indicate that these portions +are licensed under this license): + + PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 + -------------------------------------------- + + 1. This LICENSE AGREEMENT is between the Python Software Foundation + ("PSF"), and the Individual or Organization ("Licensee") accessing and + otherwise using this software ("Python") in source or binary form and + its associated documentation. + + 2. 
Subject to the terms and conditions of this License Agreement, PSF + hereby grants Licensee a nonexclusive, royalty-free, world-wide + license to reproduce, analyze, test, perform and/or display publicly, + prepare derivative works, distribute, and otherwise use Python + alone or in any derivative version, provided, however, that PSF's + License Agreement and PSF's notice of copyright, i.e., "Copyright (c) + 2001, 2002, 2003, 2004 Python Software Foundation; All Rights Reserved" + are retained in Python alone or in any derivative version prepared + by Licensee. + + 3. In the event Licensee prepares a derivative work that is based on + or incorporates Python or any part thereof, and wants to make + the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to Python. + + 4. PSF is making Python available to Licensee on an "AS IS" + basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT + INFRINGE ANY THIRD PARTY RIGHTS. + + 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS + A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, + OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between PSF and + Licensee. This License Agreement does not grant permission to use PSF + trademarks or trade name in a trademark sense to endorse or promote + products or services of Licensee, or any third party. + + 8. By copying, installing or otherwise using Python, Licensee + agrees to be bound by the terms and conditions of this License + Agreement. diff --git a/lib3/Chameleon-2.9.2/Makefile b/lib3/Chameleon-2.9.2/Makefile new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/Makefile @@ -0,0 +1,89 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = docs +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) + +.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Chameleon.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Chameleon.qhc" + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ + "run these through (pdf)latex." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." diff --git a/lib3/Chameleon-2.9.2/PKG-INFO b/lib3/Chameleon-2.9.2/PKG-INFO new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/PKG-INFO @@ -0,0 +1,1122 @@ +Metadata-Version: 1.1 +Name: Chameleon +Version: 2.9.2 +Summary: Fast HTML/XML Template Compiler. +Home-page: http://www.pagetemplates.org/ +Author: Malthe Borch +Author-email: mborch at gmail.com +License: BSD-like (http://repoze.org/license.html) +Description: Overview + ======== + + Chameleon is an HTML/XML template engine for `Python + `_. It uses the *page templates* language. + + You can use it in any Python web application with just about any + version of Python (2.5 and up, including 3.x and `pypy + `_). + + Visit the `website `_ for more information + or the `documentation `_. + + License and Copyright + --------------------- + + This software is made available as-is under a BSD-like license [1]_ + (see included copyright notice). + + + Notes + ----- + + .. [1] This software is licensed under the `Repoze + `_ license. + + + Changes + ======= + + 2.9.2 (2012-06-06) + ------------------ + + Bugfixes: + + - Fixed a PyPy incompatibility. + + - Fixed issue #109 which caused testing failures on some platforms. + + 2.9.1 (2012-06-01) + ------------------ + + Bugfixes: + + - Fixed issue #103. The ``tal:on-error`` statement now always adds an + explicit end-tag to the element, even with a substitution content of + nothing. + + - Fixed issue #113. The ``tal:on-error`` statement now works correctly + also for dynamic attributes. That is, the fallback tag now includes + only static attributes. + + - Fixed name error which prevented the benchmark from running + correctly. 
+ + Compatibility: + + - Fixed deprecation warning on Python 3 for zope interface implements + declaration. This fixes issue #116. + + 2.9.0 (2012-05-31) + ------------------ + + Features: + + - The translation function now gets the ``econtext`` argument as the + value for ``context``. Note that historically, this was usually an + HTTP request which might provide language negotiation data through a + dictionary interface. + [alvinyue] + + Bugfixes: + + - Fixed import alias issue which would lead to a syntax error in + generated Python code. Fixes issue #114. + + 2.8.5 (2012-05-02) + ------------------ + + Bugfixes: + + - Fixed minor installation issues on Python 2.5 and 3. + [ppaez] + + - Ensure output is unicode even when trivial (an empty string). + + 2.8.4 (2012-04-18) + ------------------ + + Features: + + - In exception output, long filenames are now truncated to 60 + characters of output, preventing line wrap which makes it difficult + to scan the exception output. + + Bugfixes: + + - Include filename and location in exception output for exceptions + raised during compilation. + + - If a trivial translation substitution variable is given (i.e. an + empty string), simply ignore it. This fixes issue #106. + + 2.8.3 (2012-04-16) + ------------------ + + Features: + + - Log template source on debug-level before cooking. + + - The `target_language` argument, if given, is now available as a + variable in templates. + + 2.8.2 (2012-03-30) + ------------------ + + Features: + + - Temporary caches used in debug mode are cleaned up eagerly, rather + than waiting for process termination. + [mitchellrj] + + Bugfixes: + + - The `index`, `start` and `end` methods on the TAL repeat object are + now callable. This fixes an incompatibility with ZPT. + + - The loader now correctly handles absolute paths on Windows. + [rdale] + + 2.8.1 (2012-03-29) + ------------------ + + Features: + + - The exception formatter now lists errors in 'wrapping order'. This + means that the innermost, and presumably most relevant exception is + shown last. + + Bugfixes: + + - The exception formatter now correctly recognizes nested errors and + does not rewrap the dynamically generated exception class. + + - The exception formatter now correctly sets the ``__module__`` + attribute to that of the original exception class. + + 2.8.0 (2012-02-29) + ------------------ + + Features: + + - Added support for code blocks using the `` processing + instruction syntax. + + The scope is name assignments is up until the nearest macro + definition, or the template itself if macros are not used. + + Bugfixes: + + - Fall back to the exception class' ``__new__`` method to safely + create an exception object that is not implemented in Python. + + - The exception formatter now keeps track of already formatted + exceptions, and ignores them from further output. + + 2.7.4 (2012-02-27) + ------------------ + + - The error handler now invokes the ``__init__`` method of + ``BaseException`` instead of the possibly overriden method (which + may take required arguments). This fixes issue #97. + [j23d, malthe] + + 2.7.3 (2012-01-16) + ------------------ + + Bugfixes: + + - The trim whitespace option now correctly trims actual whitespace to + a single character, appearing either to the left or to the right of + an element prefix or suffix string. + + 2.7.2 (2012-01-08) + ------------------ + + Features: + + - Added option ``trim_attribute_space`` that decides whether attribute + whitespace is stripped (at most down to a single space). 
This option + exists to provide compatibility with the reference + implementation. Fixes issue #85. + + Bugfixes: + + - Ignore unhashable builtins when generating a reverse builtin + map to quickly look up a builtin value. + [malthe] + + - Apply translation mapping even when a translation function is not + available. This fixes issue #83. + [malthe] + + - Fixed issue #80. The translation domain for a slot is defined by the + source document, i.e. the template providing the content for a slot + whether it be the default or provided through ``metal:fill-slot``. + [jcbrand] + + - In certain circumstances, a Unicode non-breaking space character would cause + a define clause to fail to parse. + + 2.7.1 (2011-12-29) + ------------------ + + Features: + + - Enable expression interpolation in CDATA. + + - The page template class now implements dictionary access to macros:: + + template[name] + + This is a short-hand for:: + + template.macros[name] + + Bugfixes: + + - An invalid define clause would be silently ignored; we now raise a + language error exception. This fixes issue #79. + + - Fixed regression where ``${...}`` interpolation expressions could + not span multiple lines. This fixes issue #77. + + 2.7.0 (2011-12-13) + ------------------ + + Features: + + - The ``load:`` expression now derives from the string expression such + that the ``${...}`` operator can be used for expression + interpolation. + + - The ``load:`` expression now accepts asset specs; these are resolved + by the ``pkg_resources.resource_filename`` function:: + + : + + An example from the test suite:: + + chameleon:tests/inputs/hello_world.pt + + Bugfixes: + + - If an attribute name for translation was not a valid Python + identifier, the compiler would generate invalid code. This has been + fixed, and the compiler now also throws an exception if an attribute + specification contains a comma. (Note that the only valid separator + character is the semicolon, when specifying attributes for + translation via the ``i18n:translate`` statement). This addresses + issue #76. + + 2.6.2 (2011-12-08) + ------------------ + + Bugfixes: + + - Fixed issue where ``tal:on-error`` would not respect + ``tal:omit-tag`` or namespace elements which are omitted by default + (such as ````). + + - Fixed issue where ``macros`` attribute would not be available on + file-based templates due to incorrect initialization. + + - The ``TryExcept`` and ``TryFinally`` AST nodes are not available on + Python 3.3. These have been aliased to ``Try``. This fixes issue + #75. + + Features: + + - The TAL repeat item now makes a security declaration that grants + access to unprotected subobjects on the Zope 2 platform:: + + __allow_access_to_unprotected_subobjects__ = True + + This is required for legacy compatibility and does not affect other + environments. + + - The template object now has a method ``write(body)`` which + explicitly decodes and cooks a string input. + + - Added configuration option ``loader_class`` which sets the class + used to create the template loader object. + + The class (essentially a callable) is created at template + construction time. + + 2.6.1 (2011-11-30) + ------------------ + + Bugfixes: + + - Decode HTML entities in expression interpolation strings. This fixes + issue #74. + + - Allow ``xml`` and ``xmlns`` attributes on TAL, I18N and METAL + namespace elements. This fixes issue #73. 
+ + 2.6.0 (2011-11-24) + ------------------ + + Features: + + - Added support for implicit translation: + + The ``implicit_i18n_translate`` option enables implicit translation + of text. The ``implicit_i18n_attributes`` enables implicit + translation of attributes. The latter must be a set and for an + attribute to be implicitly translated, its lowercase string value + must be included in the set. + + - Added option ``strict`` (enabled by default) which decides whether + expressions are required to be valid at compile time. That is, if + not set, an exception is only raised for an invalid expression at + evaluation time. + + - An expression error now results in an exception only if the + expression is attempted evaluated during a rendering. + + - Added a configuration option ``prepend_relative_search_path`` which + decides whether the path relative to a file-based template is + prepended to the load search path. The default is ``True``. + + - Added a configuration option ``search_path`` to the file-based + template class, which adds additional paths to the template load + instance bound to the ``load:`` expression. The option takes a + string path or an iterable yielding string paths. The default value + is the empty set. + + Bugfixes: + + - Exception instances now support pickle/unpickle. + + - An attributes in i18n:attributes no longer needs to match an + existing or dynamic attribute in order to appear in the + element. This fixes issue #66. + + 2.5.3 (2011-10-23) + ------------------ + + Bugfixes: + + - Fixed an issue where a nested macro slot definition would fail even + though there existed a parent macro definition. This fixes issue + #69. + + 2.5.2 (2011-10-12) + ------------------ + + Bugfixes: + + - Fixed an issue where technically invalid input would result in a + compiler error. + + Features: + + - The markup class now inherits from the unicode string type such that + it's compatible with the string interface. + + 2.5.1 (2011-09-29) + ------------------ + + Bugfixes: + + - The symbol names "convert", "decode" and "translate" are now no + longer set as read-only *compiler internals*. This fixes issue #65. + + - Fixed an issue where a macro extension chain nested two levels (a + template uses a macro that extends a macro) would lose the middle + slot definitions if slots were defined nested. + + The compiler now throws an error if a nested slot definition is used + outside a macro extension context. + + 2.5.0 (2011-09-23) + ------------------ + + Features: + + - An expression type ``structure:`` is now available which wraps the + expression result as *structure* such that it is not escaped on + insertion, e.g.:: + +
+ ${structure: context.body} +
+ + This also means that the ``structure`` keyword for ``tal:content`` + and ``tal:replace`` now has an alternative spelling via the + expression type ``structure:``. + + - The string-based template constructor now accepts encoded input. + + 2.4.6 (2011-09-23) + ------------------ + + Bugfixes: + + - The ``tal:on-error`` statement should catch all exceptions. + + - Fixed issue that would prevent escaping of interpolation expression + values appearing in text. + + 2.4.5 (2011-09-21) + ------------------ + + Bugfixes: + + - The ``tal:on-error`` handler should have a ``error`` variable + defined that has the value of the exception thrown. + + - The ``tal:on-error`` statement is a substitution statement and + should support the "text" and "structure" insertion methods. + + 2.4.4 (2011-09-15) + ------------------ + + Bugfixes: + + - An encoding specified in the XML document preamble is now read and + used to decode the template input to unicode. This fixes issue #55. + + - Encoded expression input on Python 3 is now correctly + decoded. Previously, the string representation output would be + included instead of an actually decoded string. + + - Expression result conversion steps are now correctly included in + error handling such that the exception output points to the + expression location. + + 2.4.3 (2011-09-13) + ------------------ + + Features: + + - When an encoding is provided, pass the 'ignore' flag to avoid + decoding issues with bad input. + + Bugfixes: + + - Fixed pypy compatibility issue (introduced in previous release). + + 2.4.2 (2011-09-13) + ------------------ + + Bugfixes: + + - Fixed an issue in the compiler where an internal variable (such as a + translation default value) would be cached, resulting in variable + scope corruption (see issue #49). + + 2.4.1 (2011-09-08) + ------------------ + + Bugfixes: + + - Fixed an issue where a default value for an attribute would + sometimes spill over into another attribute. + + - Fixed issue where the use of the ``default`` name in an attribute + interpolation expression would print the attribute value. This is + unexpected, because it's an expression, not a static text suitable + for output. An attribute value of ``default`` now correctly drops + the attribute. + + 2.4.0 (2011-08-22) + ------------------ + + Features: + + - Added an option ``boolean_attributes`` to evaluate and render a + provided set of attributes using a boolean logic: if the attribute + is a true value, the value will be the attribute name, otherwise the + attribute is dropped. + + In the reference implementation, the following attributes are + configured as boolean values when the template is rendered in + HTML-mode:: + + "compact", "nowrap", "ismap", "declare", "noshade", + "checked", "disabled", "readonly", "multiple", "selected", + "noresize", "defer" + + Note that in Chameleon, these attributes must be manually provided. + + Bugfixes: + + - The carriage return character (used on Windows platforms) would + incorrectly be included in Python comments. + + It is now replaced with a line break. + + This fixes issue #44. + + 2.3.8 (2011-08-19) + ------------------ + + - Fixed import error that affected Python 2.5 only. + + 2.3.7 (2011-08-19) + ------------------ + + Features: + + - Added an option ``literal_false`` that disables the default behavior + of dropping an attribute for a value of ``False`` (in addition to + ``None``). This modified behavior is the behavior exhibited in + reference implementation. 
+ + Bugfixes: + + - Undo attribute special HTML attribute behavior (see previous + release). + + This turned out not to be a compatible behavior; rather, boolean + values should simply be coerced to a string. + + Meanwhile, the reference implementation does support an HTML mode in + which the special attribute behavior is exhibited. + + We do not currently support this mode. + + 2.3.6 (2011-08-18) + ------------------ + + Features: + + - Certain HTML attribute names now have a special behavior for a + attribute value of ``True`` (or ``default`` if no default is + defined). For these attributes, this return value will result in the + name being printed as the value:: + + + + will be rendered as:: + + + + This behavior is compatible with the reference implementation. + + 2.3.5 (2011-08-18) + ------------------ + + Features: + + - Added support for the set operator (``{item, item, ...}``). + + Bugfixes: + + - If macro is defined on the same element as a translation name, this + no longer results in a "translation name not allowed outside + translation" error. This fixes issue #43. + + - Attribute fallback to dictionary lookup now works on multiple items + (e.g. ``d1.d2.d2``). This fixes issue #42. + + 2.3.4 (2011-08-16) + ------------------ + + Features: + + - When inserting content in either attributes or text, a value of + ``True`` (like ``False`` and ``None``) will result in no + action. + + - Use statically assigned variables for ``"attrs"`` and + ``"default"``. This change yields a performance improvement of + 15-20%. + + - The template loader class now accepts an optional argument + ``default_extension`` which accepts a filename extension which will + be appended to the filename if there's not already an extension. + + Bugfixes: + + - The default symbol is now ``True`` for an attribute if the attribute + default is not provided. Note that the result is that the attribute + is dropped. This fixes issue #41. + + - Fixed an issue where assignment to a variable ``"type"`` would + fail. This fixes issue #40. + + - Fixed an issue where an (unsuccesful) assignment for a repeat loop + to a compiler internal name would not result in an error. + + - If the translation function returns the identical object, manually + coerce it to string. This fixes a compatibility issue with + translation functions which do not convert non-string objects to a + string value, but simply return them unchanged. + + 2.3.3 (2011-08-15) + ------------------ + + Features: + + - The ``load:`` expression now passes the initial keyword arguments to + its template loader (e.g. ``auto_reload`` and ``encoding``). + + - In the exception output, string variable values are now limited to a + limited output of characters, single line only. + + Bugfixes: + + - Fixed horizontal alignment of exception location info + (i.e. 'String:', 'Filename:' and 'Location:') such that they match + the template exception formatter. + + 2.3.2 (2011-08-11) + ------------------ + + Bugfixes: + + - Fixed issue where i18n:domain would not be inherited through macros + and slots. This fixes issue #37. + + 2.3.1 (2011-08-11) + ------------------ + + Features: + + - The ``Builtin`` node type may now be used to represent any Python + local or global name. This allows expression compilers to refer to + e.g. ``get`` or ``getitem``, or to explicit require a builtin object + such as one from the ``extra_builtins`` dictionary. + + Bugfixes: + + - Builtins which are not explicitly disallowed may now be redefined + and used as variables (e.g. ``nothing``). 
+ + - Fixed compiler issue with circular node annotation loop. + + 2.3 (2011-08-10) + ---------------- + + Features: + + - Added support for the following syntax to disable inline evaluation + in a comment: + + + + Note that the initial question mark character (?) will be omitted + from output. + + - The parser now accepts '<' and '>' in attributes. Note that this is + invalid markup. Previously, the '<' would not be accepted as a valid + attribute value, but this would result in an 'unexpected end tag' + error elsewhere. This fixes issue #38. + + - The expression compiler now provides methods ``assign_text`` and + ``assign_value`` such that a template engine might configure this + value conversion to support e.g. encoded strings. + + Note that currently, the only client for the ``assign_text`` method + is the string expression type. + + - Enable template loader for string-based template classes. Note that + the ``filename`` keyword argument may be provided on initialization + to identify the template source by filename. This fixes issue #36. + + - Added ``extra_builtins`` option to the page template class. These + builtins are added to the default builtins dictionary at cook time + and may be provided at initialization using the ``extra_builtins`` + keyword argument. + + Bugfixes: + + - If a translation domain is set for a fill slot, use this setting + instead of the macro template domain. + + - The Python expression compiler now correctly decodes HTML entities + ``'gt'`` and ``'lt'``. This fixes issue #32. + + - The string expression compiler now correctly handles encoded text + (when support for encoded strings is enabled). This fixes issue #35. + + - Fixed an issue where setting the ``filename`` attribute on a + file-based template would not automatically cause an invalidation. + + - Exceptions raised by Chameleon can now be copied via + ``copy.copy``. This fixes issue #36. + [leorochael] + + - If copying the exception fails in the exception handler, simply + re-raise the original exception and log a warning. + + 2.2 (2011-07-28) + ---------------- + + Features: + + - Added new expression type ``load:`` that allows loading a + template. Both relative and absolute paths are supported. If the + path given is relative, then it will be resolved with respect to the + directory of the template. + + - Added support for dynamic evaluation of expressions. + + Note that this is to support legacy applications. It is not + currently wired into the provided template classes. + + - Template classes now have a ``builtins`` attribute which may be used + to define built-in variables always available in the template + variable scope. + + Incompatibilities: + + - The file-based template class no longer accepts a parameter + ``loader``. This parameter would be used to load a template from a + relative path, using a ``find(filename)`` method. This was however, + undocumented, and probably not very useful since we have the + ``TemplateLoader`` mechanism already. + + - The compiled template module now contains an ``initialize`` function + which takes values that map to the template builtins. The return + value of this function is a dictionary that contains the render + functions. + + Bugfixes: + + - The file-based template class no longer verifies the existance of a + template file (using ``os.lstat``). This now happens implicitly if + eager parsing is enabled, or otherwise when first needed (e.g. at + render time). 
+ + This is classified as a bug fix because the previous behavior was + probably not what you'd expect, especially if an application + initializes a lot of templates without needing to render them + immediately. + + 2.1.1 (2011-07-28) + ------------------ + + Features: + + - Improved exception display. The expression string is now shown in + the context of the original source (if available) with a marker + string indicating the location of the expression in the template + source. + + Bugfixes: + + - The ``structure`` insertion mode now correctly decodes entities for + any expression type (including ``string:``). This fixes issue #30. + + - Don't show internal variables in the exception formatter variable + listing. + + 2.1 (2011-07-25) + ---------------- + + Features: + + - Expression interpolation (using the ``${...}`` operator and + previously also ``$identifier``) now requires braces everywhere + except inside the ``string:`` expression type. + + This change is motivated by a number of legacy templates in which + the interpolation format without braces ``$identifier`` appears as + text. + + 2.0.2 (2011-07-25) + ------------------ + + Bugfixes: + + - Don't use dynamic variable scope for lambda-scoped variables (#27). + + - Avoid duplication of exception class and message in traceback. + + - Fixed issue where a ``metal:fill-slot`` would be ignored if a macro + was set to be used on the same element (#16). + + 2.0.1 (2011-07-23) + ------------------ + + Bugfixes: + + - Fixed issue where global variable definition from macro slots would + fail (they would instead be local). This also affects error + reporting from inside slots because this would be recorded + internally as a global. + + - Fixed issue with template cache digest (used for filenames); modules + are now invalidated whenever any changes are made to the + distribution set available (packages on ``sys.path``). + + - Fixed exception handler to better let exceptions propagate through + the renderer. + + - The disk-based module compiler now mangles template source filenames + such that the output Python module is valid and at root level (dots + and hyphens are replaced by an underscore). This fixes issue #17. + + - Fixed translations (i18n) on Python 2.5. + + 2.0 (2011-07-14) + ---------------- + + - Point release. + + 2.0-rc14 (2011-07-13) + --------------------- + + Bugfixes: + + - The tab character (``\t``) is now parsed correctly when used inside + tags. + + Features: + + - The ``RepeatDict`` class now works as a proxy behind a seperate + dictionary instance. + + - Added template constructor option ``keep_body`` which is a flag + (also available as a class attribute) that controls whether to save + the template body input in the ``body`` attribute. + + This is disabled by default, unless debug-mode is enabled. + + - The page template loader class now accepts an optional ``formats`` + argument which can be used to select an alternative template class. + + 2.0-rc13 (2011-07-07) + --------------------- + + Bugfixes: + + - The backslash character (followed by optional whitespace and a line + break) was not correctly interpreted as a continuation for Python + expressions. + + Features: + + - The Python expression implementation is now more flexible for + external subclassing via a new ``parse`` method. + + 2.0-rc12 (2011-07-04) + --------------------- + + Bugfixes: + + - Initial keyword arguments passed to a template now no longer "leak" + into the template variable space after a macro call. 
+ + - An unexpected end tag is now an unrecoverable error. + + Features: + + - Improve exception output. + + 2.0-rc11 (2011-05-26) + --------------------- + + Bugfixes: + + - Fixed issue where variable names that begin with an underscore were + seemingly allowed, but their use resulted in a compiler error. + + Features: + + - Template variable names are now allowed to be prefixed with a single + underscore, but not two or more (reserved for internal use). + + Examples of valid names:: + + item + ITEM + _item + camelCase + underscore_delimited + help + + - Added support for Genshi's comment "drop" syntax:: + + + + Note the additional exclamation (!) character. + + This fixes addresses issue #10. + + 2.0-rc10 (2011-05-24) + --------------------- + + Bugfixes: + + - The ``tal:attributes`` statement now correctly operates + case-insensitive. The attribute name given in the statement will + replace an existing attribute with the same name, without respect to + case. + + Features: + + - Added ``meta:interpolation`` statement to control expression + interpolation setting. + + Strings that disable the setting: ``"off"`` and ``"false"``. + Strings that enable the setting: ``"on"`` and ``"true"``. + + - Expression interpolation now works inside XML comments. + + 2.0-rc9 (2011-05-05) + -------------------- + + Features: + + - Better debugging support for string decode and conversion. If a + naive join fails, each element in the output will now be attempted + coerced to unicode to try and trigger the failure near to the bad + string. + + 2.0-rc8 (2011-04-11) + -------------------- + + Bugfixes: + + - If a macro defines two slots with the same name, a caller will now + fill both with a single usage. + + - If a valid of ``None`` is provided as the translation function + argument, we now fall back to the class default. + + 2.0-rc7 (2011-03-29) + -------------------- + + Bugfixes: + + - Fixed issue with Python 2.5 compatibility AST. This affected at + least PyPy 1.4. + + Features: + + - The ``auto_reload`` setting now defaults to the class value; the + base template class gives a default value of + ``chameleon.config.AUTO_RELOAD``. This change allows a subclass to + provide a custom default value (such as an application-specific + debug mode setting). + + + 2.0-rc6 (2011-03-19) + -------------------- + + Features: + + - Added support for ``target_language`` keyword argument to render + method. If provided, the argument will be curried onto the + translation function. + + Bugfixes: + + - The HTML entities 'lt', 'gt' and 'quot' appearing inside content + subtition expressions are now translated into their native character + values. This fixes an issue where you could not dynamically create + elements using the ``structure`` (which is possible in ZPT). The + need to create such structure stems from the lack of an expression + interpolation operator in ZPT. + + - Fixed duplicate file pointer issue with test suite (affected Windows + platforms only). This fixes issue #9. + [oliora] + + - Use already open file using ``os.fdopen`` when trying to write out + the module source. This fixes LP #731803. + + + 2.0-rc5 (2011-03-07) + -------------------- + + Bugfixes: + + - Fixed a number of issues concerning the escaping of attribute + values: + + 1) Static attribute values are now included as they appear in the + source. + + This means that invalid attribute values such as ``"true && + false"`` are now left alone. 
It's not the job of the template + engine to correct such markup, at least not in the default mode + of operation. + + 2) The string expression compiler no longer unescapes + values. Instead, this is left to each expression + compiler. Currently only the Python expression compiler unescapes + its input. + + 3) The dynamic escape code sequence now correctly only replaces + ampersands that are part of an HTML escape format. + + Imports: + + - The page template classes and the loader class can now be imported + directly from the ``chameleon`` module. + + Features: + + - If a custom template loader is not provided, relative paths are now + resolved using ``os.abspath`` (i.e. to the current working + directory). + + - Absolute paths are normalized using ``os.path.normpath`` and + ``os.path.expanduser``. This ensures that all paths are kept in + their "canonical" form. + + + 2.0-rc4 (2011-03-03) + -------------------- + + Bugfixes: + + - Fixed an issue where the output of an end-to-end string expression + would raise an exception if the expression evaluated to ``None`` (it + should simply output nothing). + + - The ``convert`` function (which is configurable on the template + class level) now defaults to the ``translate`` function (at + run-time). + + This fixes an issue where message objects were not translated (and + thus converted to a string) using the a provided ``translate`` + function. + + - Fixed string interpolation issue where an expression immediately + succeeded by a right curly bracket would not parse. + + This fixes issue #5. + + - Fixed error where ``tal:condition`` would be evaluated after + ``tal:repeat``. + + Features: + + - Python expression is now a TALES expression. That means that the + pipe operator can be used to chain two or more expressions in a + try-except sequence. + + This behavior was ported from the 1.x series. Note that while it's + still possible to use the pipe character ("|") in an expression, it + must now be escaped. + + - The template cache can now be shared by multiple processes. + + + 2.0-rc3 (2011-03-02) + -------------------- + + Bugfixes: + + - Fixed ``atexit`` handler. + + This fixes issue #3. + + - If a cache directory is specified, it will now be used even when not + in debug mode. + + - Allow "comment" attribute in the TAL namespace. + + This fixes an issue in the sense that the reference engine allows + any attribute within the TAL namespace. However, only "comment" is + in common use. + + - The template constructor now accepts a flag ``debug`` which puts the + template *instance* into debug-mode regardless of the global + setting. + + This fixes issue #1. + + Features: + + - Added exception handler for exceptions raised while evaluating an + expression. + + This handler raises (or attempts to) a new exception of the type + ``RenderError``, with an additional base class of the original + exception class. The string value of the exception is a formatted + error message which includes the expression that caused the + exception. + + If we are unable to create the exception class, the original + exception is re-raised. + + 2.0-rc2 (2011-02-28) + -------------------- + + - Fixed upload issue. + + 2.0-rc1 (2011-02-28) + -------------------- + + - Initial public release. See documentation for what's new in this + series. 
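As a quick illustration of two features named in this changelog (the ``extra_builtins`` template option from 2.3 and the loader's ``default_extension`` argument from 2.3.4), here is a rough sketch. The ``greet`` helper and the ``/path/to/templates`` directory are invented for the example and are not part of the distribution::

    from chameleon import PageTemplate, PageTemplateLoader

    # extra_builtins (2.3): extra names available in every template scope,
    # without passing them on each render call.
    template = PageTemplate(
        "<p>${greet(user)}</p>",
        extra_builtins={"greet": lambda name: "Hello, %s!" % name},
    )
    print(template(user="World"))   # renders roughly: <p>Hello, World!</p>

    # default_extension (2.3.4): the loader appends the extension, so a
    # template can be looked up by bare name.
    templates = PageTemplateLoader("/path/to/templates",
                                   default_extension=".pt")
    # page = templates["hello"]     # would resolve to .../hello.pt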
+ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 diff --git a/lib3/Chameleon-2.9.2/README.rst b/lib3/Chameleon-2.9.2/README.rst new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/README.rst @@ -0,0 +1,25 @@ +Overview +======== + +Chameleon is an HTML/XML template engine for `Python +`_. It uses the *page templates* language. + +You can use it in any Python web application with just about any +version of Python (2.5 and up, including 3.x and `pypy +`_). + +Visit the `website `_ for more information +or the `documentation `_. + +License and Copyright +--------------------- + +This software is made available as-is under a BSD-like license [1]_ +(see included copyright notice). + + +Notes +----- + +.. [1] This software is licensed under the `Repoze + `_ license. diff --git a/lib3/Chameleon-2.9.2/benchmarks/bm_chameleon.py b/lib3/Chameleon-2.9.2/benchmarks/bm_chameleon.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/benchmarks/bm_chameleon.py @@ -0,0 +1,128 @@ +#!/usr/bin/python2 + +""" +Benchmark for test the performance of Chameleon page template engine. +""" + +__author__ = "mborch at gmail.com (Malthe Borch)" + +# Python imports +import os +import sys +import optparse +import time + +# Local imports +import util + + +def relative(*args): + return os.path.join(os.path.dirname(os.path.abspath(__file__)), *args) + +sys.path.insert(0, relative('..', 'src')) + +# Chameleon imports +from chameleon import PageTemplate + + +LOREM_IPSUM = """Quisque lobortis hendrerit posuere. Curabitur +aliquet consequat sapien molestie pretium. Nunc adipiscing luc +tus mi, viverra porttitor lorem vulputate et. Ut at purus sem, +sed tincidunt ante. Vestibulum ante ipsum primis in faucibus +orci luctus et ultrices posuere cubilia Curae; Praesent pulvinar +sodales justo at congue. Praesent aliquet facilisis nisl a +molestie. Sed tempus nisl ut augue eleifend tincidunt. Sed a +lacinia nulla. Cras tortor est, mollis et consequat at, +vulputate et orci. Nulla sollicitudin""" + +BASE_TEMPLATE = ''' + +
+ + + +
${col}
+ ${alt} + + + ${title.strip()} + + +''' + +PAGE_TEMPLATE = ''' + + + +images: + + + + +

${lorem}

+
+ + +''' + +CONTENT_TEMPLATE = ''' + +fun1 +fun2 +fun3 +fun4 +fun5 +fun6 + +

Lorem ipsum dolor sit amet, consectetur adipiscing elit. +Nam laoreet justo in velit faucibus lobortis. Sed dictum sagittis +volutpat. Sed adipiscing vestibulum consequat. Nullam laoreet, ante +nec pretium varius, libero arcu porttitor orci, id cursus odio nibh +nec leo. Vestibulum dapibus pellentesque purus, sed bibendum tortor +laoreet id. Praesent quis sodales ipsum. Fusce ut ligula sed diam +pretium sagittis vel at ipsum. Nulla sagittis sem quam, et volutpat +velit. Fusce dapibus ligula quis lectus ultricies tempor. Pellente

+ + + + + + + + +''' + + +def test_mako(count): + template = PageTemplate(CONTENT_TEMPLATE) + base = PageTemplate(BASE_TEMPLATE) + page = PageTemplate(PAGE_TEMPLATE) + + table = [xrange(150) for i in xrange(150)] + paragraphs = xrange(50) + title = 'Hello world!' + + times = [] + for i in range(count): + t0 = time.time() + data = template.render( + table=table, paragraphs=paragraphs, + lorem=LOREM_IPSUM, title=title, + img_count=50, + base=base, + page=page, + ) + t1 = time.time() + times.append(t1-t0) + return times + +if __name__ == "__main__": + parser = optparse.OptionParser( + usage="%prog [options]", + description=("Test the performance of Chameleon templates.")) + util.add_standard_options_to(parser) + (options, args) = parser.parse_args() + + util.run_benchmark(options, options.num_runs, test_mako) diff --git a/lib3/Chameleon-2.9.2/benchmarks/bm_mako.py b/lib3/Chameleon-2.9.2/benchmarks/bm_mako.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/benchmarks/bm_mako.py @@ -0,0 +1,153 @@ +#!/usr/bin/python + +""" +Benchmark for test the performance of Mako templates engine. +Includes: + -two template inherences + -HTML escaping, XML escaping, URL escaping, whitespace trimming + -function defitions and calls + -forloops +""" + +__author__ = "virhilo at gmail.com (Lukasz Fidosz)" + +# Python imports +import os +import sys +import optparse +import time + +# Local imports +import util + +def relative(*args): + return os.path.join(os.path.dirname(os.path.abspath(__file__)), *args) + +sys.path.insert(0, relative('..', 'lib')) + +# Mako imports +from mako.template import Template +from mako.lookup import TemplateLookup + + +LOREM_IPSUM = """Quisque lobortis hendrerit posuere. Curabitur +aliquet consequat sapien molestie pretium. Nunc adipiscing luc +tus mi, viverra porttitor lorem vulputate et. Ut at purus sem, +sed tincidunt ante. Vestibulum ante ipsum primis in faucibus +orci luctus et ultrices posuere cubilia Curae; Praesent pulvinar +sodales justo at congue. Praesent aliquet facilisis nisl a +molestie. Sed tempus nisl ut augue eleifend tincidunt. Sed a +lacinia nulla. Cras tortor est, mollis et consequat at, +vulputate et orci. Nulla sollicitudin""" + +BASE_TEMPLATE = """ +<%def name="render_table(table)"> +
+ % for row in table: + + % for col in row: + + % endfor + + % endfor +
${col|h}
+ +<%def name="img(src, alt)"> + ${alt} + + + ${title|h,trim} + + ${next.body()} + + +""" + +PAGE_TEMPLATE = """ +<%inherit file="base.mako"/> + + % for row in table: + + % for col in row: + + % endfor + + % endfor +
${col}
+% for nr in xrange(img_count): + ${parent.img('/foo/bar/baz.png', 'no image :o')} +% endfor +${next.body()} +% for nr in paragraphs: +

${lorem|x}

+% endfor +${parent.render_table(table)} +""" + +CONTENT_TEMPLATE = """ +<%inherit file="page.mako"/> +<%def name="fun1()"> + fun1 + +<%def name="fun2()"> + fun2 + +<%def name="fun3()"> + foo3 + +<%def name="fun4()"> + foo4 + +<%def name="fun5()"> + foo5 + +<%def name="fun6()"> + foo6 + +

Lorem ipsum dolor sit amet, consectetur adipiscing elit. +Nam laoreet justo in velit faucibus lobortis. Sed dictum sagittis +volutpat. Sed adipiscing vestibulum consequat. Nullam laoreet, ante +nec pretium varius, libero arcu porttitor orci, id cursus odio nibh +nec leo. Vestibulum dapibus pellentesque purus, sed bibendum tortor +laoreet id. Praesent quis sodales ipsum. Fusce ut ligula sed diam +pretium sagittis vel at ipsum. Nulla sagittis sem quam, et volutpat +velit. Fusce dapibus ligula quis lectus ultricies tempor. Pellente

+${fun1()} +${fun2()} +${fun3()} +${fun4()} +${fun5()} +${fun6()} +""" + + +def test_mako(count): + + lookup = TemplateLookup() + lookup.put_string('base.mako', BASE_TEMPLATE) + lookup.put_string('page.mako', PAGE_TEMPLATE) + + template = Template(CONTENT_TEMPLATE, lookup=lookup) + + table = [xrange(150) for i in xrange(150)] + paragraphs = xrange(50) + title = 'Hello world!' + + times = [] + for i in range(count): + t0 = time.time() + data = template.render(table=table, paragraphs=paragraphs, + lorem=LOREM_IPSUM, title=title, + img_count=50) + t1 = time.time() + times.append(t1-t0) + return times + +if __name__ == "__main__": + parser = optparse.OptionParser( + usage="%prog [options]", + description=("Test the performance of Mako templates.")) + util.add_standard_options_to(parser) + (options, args) = parser.parse_args() + + util.run_benchmark(options, options.num_runs, test_mako) diff --git a/lib3/Chameleon-2.9.2/benchmarks/util.py b/lib3/Chameleon-2.9.2/benchmarks/util.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/benchmarks/util.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +"""Utility code for benchmark scripts.""" + +__author__ = "collinwinter at google.com (Collin Winter)" + +import math +import operator + + +def run_benchmark(options, num_runs, bench_func, *args): + """Run the given benchmark, print results to stdout. + + Args: + options: optparse.Values instance. + num_runs: number of times to run the benchmark + bench_func: benchmark function. `num_runs, *args` will be passed to this + function. This should return a list of floats (benchmark execution + times). + """ + if options.profile: + import cProfile + prof = cProfile.Profile() + prof.runcall(bench_func, num_runs, *args) + prof.print_stats(sort=options.profile_sort) + else: + data = bench_func(num_runs, *args) + if options.take_geo_mean: + product = reduce(operator.mul, data, 1) + print math.pow(product, 1.0 / len(data)) + else: + for x in data: + print x + + +def add_standard_options_to(parser): + """Add a bunch of common command-line flags to an existing OptionParser. + + This function operates on `parser` in-place. + + Args: + parser: optparse.OptionParser instance. + """ + parser.add_option("-n", action="store", type="int", default=100, + dest="num_runs", help="Number of times to run the test.") + parser.add_option("--profile", action="store_true", + help="Run the benchmark through cProfile.") + parser.add_option("--profile_sort", action="store", type="str", + default="time", help="Column to sort cProfile output by.") + parser.add_option("--take_geo_mean", action="store_true", + help="Return the geo mean, rather than individual data.") diff --git a/lib3/Chameleon-2.9.2/distribute_setup.py b/lib3/Chameleon-2.9.2/distribute_setup.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/distribute_setup.py @@ -0,0 +1,485 @@ +#!python +"""Bootstrap distribute installation + +If you want to use setuptools in your package's setup.py, just include this +file in the same directory with it, and add this to the top of your setup.py:: + + from distribute_setup import use_setuptools + use_setuptools() + +If you want to require a specific version of setuptools, set a download +mirror, or use an alternate download directory, you can do so by supplying +the appropriate options to ``use_setuptools()``. + +This file can also be run as a script to install or upgrade setuptools. 
+""" +import os +import sys +import time +import fnmatch +import tempfile +import tarfile +from distutils import log + +try: + from site import USER_SITE +except ImportError: + USER_SITE = None + +try: + import subprocess + + def _python_cmd(*args): + args = (sys.executable,) + args + return subprocess.call(args) == 0 + +except ImportError: + # will be used for python 2.3 + def _python_cmd(*args): + args = (sys.executable,) + args + # quoting arguments if windows + if sys.platform == 'win32': + def quote(arg): + if ' ' in arg: + return '"%s"' % arg + return arg + args = [quote(arg) for arg in args] + return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 + +DEFAULT_VERSION = "0.6.14" +DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/" +SETUPTOOLS_FAKED_VERSION = "0.6c11" + +SETUPTOOLS_PKG_INFO = """\ +Metadata-Version: 1.0 +Name: setuptools +Version: %s +Summary: xxxx +Home-page: xxx +Author: xxx +Author-email: xxx +License: xxx +Description: xxx +""" % SETUPTOOLS_FAKED_VERSION + + +def _install(tarball): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # installing + log.warn('Installing Distribute') + if not _python_cmd('setup.py', 'install'): + log.warn('Something went wrong during the installation.') + log.warn('See the error message above.') + finally: + os.chdir(old_wd) + + +def _build_egg(egg, tarball, to_dir): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # building an egg + log.warn('Building a Distribute egg in %s', to_dir) + _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) + + finally: + os.chdir(old_wd) + # returning the result + log.warn(egg) + if not os.path.exists(egg): + raise IOError('Could not build the egg.') + + +def _do_download(version, download_base, to_dir, download_delay): + egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' + % (version, sys.version_info[0], sys.version_info[1])) + if not os.path.exists(egg): + tarball = download_setuptools(version, download_base, + to_dir, download_delay) + _build_egg(egg, tarball, to_dir) + sys.path.insert(0, egg) + import setuptools + setuptools.bootstrap_install_from = egg + + +def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, + to_dir=os.curdir, download_delay=15, no_fake=True): + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + was_imported = 'pkg_resources' in sys.modules or \ + 'setuptools' in sys.modules + try: + try: + import pkg_resources + if not hasattr(pkg_resources, '_distribute'): + if not no_fake: + _fake_setuptools() + raise ImportError + except ImportError: + return _do_download(version, download_base, to_dir, download_delay) + try: + pkg_resources.require("distribute>="+version) + return + except pkg_resources.VersionConflict: + e = sys.exc_info()[1] + if was_imported: + sys.stderr.write( + "The required version of distribute (>=%s) is not available,\n" + "and can't be installed while this script is running. 
Please\n" + "install a more recent version first, using\n" + "'easy_install -U distribute'." + "\n\n(Currently using %r)\n" % (version, e.args[0])) + sys.exit(2) + else: + del pkg_resources, sys.modules['pkg_resources'] # reload ok + return _do_download(version, download_base, to_dir, + download_delay) + except pkg_resources.DistributionNotFound: + return _do_download(version, download_base, to_dir, + download_delay) + finally: + if not no_fake: + _create_fake_setuptools_pkg_info(to_dir) + +def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, + to_dir=os.curdir, delay=15): + """Download distribute from a specified location and return its filename + + `version` should be a valid distribute version number that is available + as an egg for download under the `download_base` URL (which should end + with a '/'). `to_dir` is the directory where the egg will be downloaded. + `delay` is the number of seconds to pause before an actual download + attempt. + """ + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + try: + from urllib.request import urlopen + except ImportError: + from urllib2 import urlopen + tgz_name = "distribute-%s.tar.gz" % version + url = download_base + tgz_name + saveto = os.path.join(to_dir, tgz_name) + src = dst = None + if not os.path.exists(saveto): # Avoid repeated downloads + try: + log.warn("Downloading %s", url) + src = urlopen(url) + # Read/write all in one block, so we don't create a corrupt file + # if the download is interrupted. + data = src.read() + dst = open(saveto, "wb") + dst.write(data) + finally: + if src: + src.close() + if dst: + dst.close() + return os.path.realpath(saveto) + +def _no_sandbox(function): + def __no_sandbox(*args, **kw): + try: + from setuptools.sandbox import DirectorySandbox + if not hasattr(DirectorySandbox, '_old'): + def violation(*args): + pass + DirectorySandbox._old = DirectorySandbox._violation + DirectorySandbox._violation = violation + patched = True + else: + patched = False + except ImportError: + patched = False + + try: + return function(*args, **kw) + finally: + if patched: + DirectorySandbox._violation = DirectorySandbox._old + del DirectorySandbox._old + + return __no_sandbox + +def _patch_file(path, content): + """Will backup the file then patch it""" + existing_content = open(path).read() + if existing_content == content: + # already patched + log.warn('Already patched.') + return False + log.warn('Patching...') + _rename_path(path) + f = open(path, 'w') + try: + f.write(content) + finally: + f.close() + return True + +_patch_file = _no_sandbox(_patch_file) + +def _same_content(path, content): + return open(path).read() == content + +def _rename_path(path): + new_name = path + '.OLD.%s' % time.time() + log.warn('Renaming %s into %s', path, new_name) + os.rename(path, new_name) + return new_name + +def _remove_flat_installation(placeholder): + if not os.path.isdir(placeholder): + log.warn('Unkown installation at %s', placeholder) + return False + found = False + for file in os.listdir(placeholder): + if fnmatch.fnmatch(file, 'setuptools*.egg-info'): + found = True + break + if not found: + log.warn('Could not locate setuptools*.egg-info') + return + + log.warn('Removing elements out of the way...') + pkg_info = os.path.join(placeholder, file) + if os.path.isdir(pkg_info): + patched = _patch_egg_dir(pkg_info) + else: + patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) + + if not patched: + log.warn('%s already patched.', pkg_info) + return False + # now let's move the 
files out of the way + for element in ('setuptools', 'pkg_resources.py', 'site.py'): + element = os.path.join(placeholder, element) + if os.path.exists(element): + _rename_path(element) + else: + log.warn('Could not find the %s element of the ' + 'Setuptools distribution', element) + return True + +_remove_flat_installation = _no_sandbox(_remove_flat_installation) + +def _after_install(dist): + log.warn('After install bootstrap.') + placeholder = dist.get_command_obj('install').install_purelib + _create_fake_setuptools_pkg_info(placeholder) + +def _create_fake_setuptools_pkg_info(placeholder): + if not placeholder or not os.path.exists(placeholder): + log.warn('Could not find the install location') + return + pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) + setuptools_file = 'setuptools-%s-py%s.egg-info' % \ + (SETUPTOOLS_FAKED_VERSION, pyver) + pkg_info = os.path.join(placeholder, setuptools_file) + if os.path.exists(pkg_info): + log.warn('%s already exists', pkg_info) + return + + log.warn('Creating %s', pkg_info) + f = open(pkg_info, 'w') + try: + f.write(SETUPTOOLS_PKG_INFO) + finally: + f.close() + + pth_file = os.path.join(placeholder, 'setuptools.pth') + log.warn('Creating %s', pth_file) + f = open(pth_file, 'w') + try: + f.write(os.path.join(os.curdir, setuptools_file)) + finally: + f.close() + +_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info) + +def _patch_egg_dir(path): + # let's check if it's already patched + pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') + if os.path.exists(pkg_info): + if _same_content(pkg_info, SETUPTOOLS_PKG_INFO): + log.warn('%s already patched.', pkg_info) + return False + _rename_path(path) + os.mkdir(path) + os.mkdir(os.path.join(path, 'EGG-INFO')) + pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') + f = open(pkg_info, 'w') + try: + f.write(SETUPTOOLS_PKG_INFO) + finally: + f.close() + return True + +_patch_egg_dir = _no_sandbox(_patch_egg_dir) + +def _before_install(): + log.warn('Before install bootstrap.') + _fake_setuptools() + + +def _under_prefix(location): + if 'install' not in sys.argv: + return True + args = sys.argv[sys.argv.index('install')+1:] + for index, arg in enumerate(args): + for option in ('--root', '--prefix'): + if arg.startswith('%s=' % option): + top_dir = arg.split('root=')[-1] + return location.startswith(top_dir) + elif arg == option: + if len(args) > index: + top_dir = args[index+1] + return location.startswith(top_dir) + if arg == '--user' and USER_SITE is not None: + return location.startswith(USER_SITE) + return True + + +def _fake_setuptools(): + log.warn('Scanning installed packages') + try: + import pkg_resources + except ImportError: + # we're cool + log.warn('Setuptools or Distribute does not seem to be installed.') + return + ws = pkg_resources.working_set + try: + setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools', + replacement=False)) + except TypeError: + # old distribute API + setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools')) + + if setuptools_dist is None: + log.warn('No setuptools distribution found') + return + # detecting if it was already faked + setuptools_location = setuptools_dist.location + log.warn('Setuptools installation detected at %s', setuptools_location) + + # if --root or --preix was provided, and if + # setuptools is not located in them, we don't patch it + if not _under_prefix(setuptools_location): + log.warn('Not patching, --root or --prefix is installing Distribute' + ' in another location') + 
return + + # let's see if its an egg + if not setuptools_location.endswith('.egg'): + log.warn('Non-egg installation') + res = _remove_flat_installation(setuptools_location) + if not res: + return + else: + log.warn('Egg installation') + pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO') + if (os.path.exists(pkg_info) and + _same_content(pkg_info, SETUPTOOLS_PKG_INFO)): + log.warn('Already patched.') + return + log.warn('Patching...') + # let's create a fake egg replacing setuptools one + res = _patch_egg_dir(setuptools_location) + if not res: + return + log.warn('Patched done.') + _relaunch() + + +def _relaunch(): + log.warn('Relaunching...') + # we have to relaunch the process + # pip marker to avoid a relaunch bug + if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']: + sys.argv[0] = 'setup.py' + args = [sys.executable] + sys.argv + sys.exit(subprocess.call(args)) + + +def _extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + import copy + import operator + from tarfile import ExtractError + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 448 # decimal for oct 0700 + self.extract(tarinfo, path) + + # Reverse sort directories. + if sys.version_info < (2, 4): + def sorter(dir1, dir2): + return cmp(dir1.name, dir2.name) + directories.sort(sorter) + directories.reverse() + else: + directories.sort(key=operator.attrgetter('name'), reverse=True) + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError: + e = sys.exc_info()[1] + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + +def main(argv, version=DEFAULT_VERSION): + """Install or upgrade setuptools and EasyInstall""" + tarball = download_setuptools() + _install(tarball) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/lib3/Chameleon-2.9.2/docs/conf.py b/lib3/Chameleon-2.9.2/docs/conf.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/docs/conf.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- +# +# Chameleon documentation build configuration file, created by +# sphinx-quickstart on Sun Nov 1 16:08:00 2009. +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys, os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.append(os.path.abspath('.')) + +# -- General configuration ----------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. 
They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.autodoc'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Chameleon' +copyright = u'2008-2011 by Malthe Borch and the Repoze Community' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '2.8' +# The full version, including alpha/beta/rc tags. +release = '2.8.0' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of documents that shouldn't be included in the build. +#unused_docs = [] + +# List of directories, relative to source directory, that shouldn't be searched +# for source files. +exclude_trees = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. Major themes that come with +# Sphinx are currently 'default' and 'sphinxdoc'. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +html_title = "Chameleon %s documentation" % version + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". 
+html_static_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bchameleonm, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_use_modindex = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = '' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'chameleondoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +# The paper size ('letter' or 'a4'). +#latex_paper_size = 'letter' + +# The font size ('10pt', '11pt' or '12pt'). +#latex_font_size = '10pt' + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ('index', 'chameleon.tex', u'Chameleon Documentation', + u'Malthe Borch et. al', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# Additional stuff for the LaTeX preamble. +#latex_preamble = '' + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_use_modindex = True diff --git a/lib3/Chameleon-2.9.2/docs/configuration.rst b/lib3/Chameleon-2.9.2/docs/configuration.rst new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/docs/configuration.rst @@ -0,0 +1,43 @@ +Configuration +============= + +Most settings can be provided as keyword-arguments to the template +constructor classes. + +There are certain settings which are required at environment +level. Acceptable values are ``"0"``, ``"1"``, or the literals +``"true"`` or ``"false"`` (case-insensitive). + +General usage +------------- + +The following settings are useful in general. + +``CHAMELEON_EAGER`` + Parse and compile templates on instantiation. + +``CHAMELEON_CACHE`` + + When set to a file system path, the template compiler will write + its output to files in this directory and use it as a cache. + + This not only enables you to see the compiler output, but also + speeds up startup. + +``CHAMELEON_RELOAD`` + This setting controls the default value of the ``auto_reload`` + parameter. + +Development +----------- + +The following settings are mostly useful during development or +debugging of the library itself. + +``CHAMELEON_DEBUG`` + + Enables a set of debugging settings which make it easier to + discover and research issues with the engine itself. 
+ + This implicitly enables auto-reload for any template. + diff --git a/lib3/Chameleon-2.9.2/docs/index.rst b/lib3/Chameleon-2.9.2/docs/index.rst new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/docs/index.rst @@ -0,0 +1,217 @@ +Chameleon +========= + +Chameleon is an HTML/XML template engine for `Python +`_. + +It's designed to generate the document output of a web application, +typically HTML markup or XML. + +The language used is *page templates*, originally a `Zope +`_ invention [1]_, but available here as a +:ref:`standalone library ` that you can use in any +script or application running Python 2.5 and up (including 3.x and +`pypy `_). It comes with a set of :ref:`new features +`, too. + +The template engine compiles templates into Python byte-code and is optimized +for speed. For a complex template language, the performance is +:ref:`very good `. + + *Found a bug?* Please report issues to the `issue tracker `_. + + *Need help?* Post to the Pylons `discussion list `_ or join the ``#pyramid`` channel on `Freenode IRC `_. + +Getting the code +---------------- + +You can `download `_ the +package from the Python package index or install the latest release +using setuptools or the newer `distribute +`_ (required for Python 3.x):: + + $ easy_install Chameleon + +.. _no-dependencies: + +There are no required library dependencies on Python 2.7 and up +[2]_. On 2.5 and 2.6, the `ordereddict +`_ and `unittest2 +`_ packages are set as +dependencies. + +The project is hosted in a `GitHub repository +`_. Code contributions are +welcome. The easiest way is to use the `pull request +`_ interface. + + +Introduction +------------ + +The *page templates* language is used within your document structure +as special element attributes and text markup. Using a set of simple +language constructs, you control the document flow, element +repetition, text replacement and translation. + +.. note:: If you've used page templates in a Zope environment previously, note that Chameleon uses Python as the default expression language (instead of *path* expressions). + +The basic language (known as the *template attribute language* or TAL) +is simple enough to grasp from an example: + +.. code-block:: genshi + + + +

+  <html>
+    <body>
+      <h1>Hello, ${'world'}!</h1>
+      <table>
+        <tr tal:repeat="row 'apple', 'banana', 'pineapple'">
+          <td tal:repeat="col 'juice', 'muffin', 'pie'">
+             ${row.capitalize()} ${col}
+          </td>
+        </tr>
+      </table>
+    </body>
+  </html>
+ + + +The ``${...}`` notation is short-hand for text insertion [3]_. The +Python-expression inside the braces is evaluated and the result +included in the output. By default, the string is escaped before +insertion. To avoid this, use the ``structure:`` prefix: + +.. code-block:: genshi + +
${structure: ...}
+ +Note that if the expression result is an object that implements an +``__html__()`` method [4]_, this method will be called and the result +treated as "structure". An example of such an object is the +``Markup`` class that's included as a utility:: + + from chameleon.utils import Markup + username = "%s" % username + +The macro language (known as the *macro expansion language* or METAL) +provides a means of filling in portions of a generic template. + +On the left, the macro template; on the right, a template that loads +and uses the macro, filling in the "content" slot: + +.. code-block:: genshi + + +

${structure: document.body}

+ Example — ${document.title} + + +

${document.title}

+ +
+ +
+ + + +In the example, the expression type :ref:`load ` is +used to retrieve a template from the file system using a path relative +to the calling template. + +The METAL system works with TAL such that you can for instance fill in +a slot that appears in a ``tal:repeat`` loop, or refer to variables +defined using ``tal:define``. + +The third language subset is the translation system (known as the +*internationalization language* or I18N): + +.. code-block:: genshi + + + + ... + +
+    <div i18n:translate="">
+       You have <span i18n:name="amount">${round(amount, 2)}</span>
+       dollars in your account.
+    </div>
+ + ... + + + +Each translation message is marked up using ``i18n:translate`` and +values can be mapped using ``i18n:name``. Attributes are marked for +translation using ``i18n:attributes``. The template engine generates +`gettext `_ translation strings from +the markup:: + + "You have ${amount} dollars in your account." + +If you use a web framework such as `Pyramid +`_, the translation +system is set up automatically and will negotiate on a *target +language* based on the HTTP request or other parameter. If not, then +you need to configure this manually. + +Next steps +---------- + +This was just an introduction. There are a number of other basic +statements that you need to know in order to use the language. This is +all covered in the :ref:`language reference `. + +If you're already familiar with the page template language, you can +skip ahead to the :ref:`getting started ` +section to learn how to use the template engine in your code. + +To learn about integration with your favorite web framework see the +section on :ref:`framework integration `. + +License +------- + +This software is made available under a BSD-like license. + + +Contents +======== + +.. toctree:: + :maxdepth: 2 + + library.rst + reference.rst + integration.rst + configuration.rst + +Indices and Tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + +Notes +===== + +.. [1] The template language specifications and API for the Page + Templates engine are based on Zope Page Templates (see in + particular `zope.pagetemplate + `_). However, + the Chameleon compiler and Page Templates engine is an entirely + new codebase, packaged as a standalone distribution. It does + not require a Zope software environment. + +.. [2] The translation system in Chameleon is pluggable and based on + `gettext `_. + There is built-in support for the `zope.i18n + `_ package. If this + package is installed, it will be used by default. The + `translationstring + `_ package + offers some of the same helper and utility classes, without the + Zope application interface. + +.. [3] This syntax was taken from `Genshi `_. + +.. [4] See the `WebHelpers + `_ + library which provide a simple wrapper around this method. diff --git a/lib3/Chameleon-2.9.2/docs/integration.rst b/lib3/Chameleon-2.9.2/docs/integration.rst new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/docs/integration.rst @@ -0,0 +1,41 @@ +.. _framework-integration: + +Integration +=========== + +Integration with Chameleon is available for a number of popular web +frameworks. The framework will usually provide loading mechanisms and +translation (internationalization) configuration. + +Pyramid +------- + +Chameleon is the default template engine for the `Pyramid +`_ framework. See the +section on `Page Templates +`_ for a complete reference. + +Zope 2 / Plone +-------------- + +Install the `five.pt `_ package +to replace the reference template engine (globally). + +Zope Toolkit (ZTK) +------------------ + +Install the `z3c.pt `_ package for +applications based on the `Zope Toolkit +`_ (ZTK). Note that you need to +explicit use the template classes from this package. + +Grok +---- + +Support for the `Grok `_ framework is available +in the `grokcore.chameleon +`_ package. + +This package will setup Grok's policy for templating integration and +associate the Chameleon template components for the ``.cpt`` template +filename extension. 
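The translation markup shown above only produces message ids; rendering an actual translation requires a translation function. The sketch below assumes the template constructor accepts a ``translate`` callable with a zope.i18n-style signature and that the ``target_language`` render argument (see the 2.0-rc6 changelog entry) is forwarded to it; the keyword name, the call signature and the toy catalogue are all assumptions rather than documented API, so treat this as an outline::

    from chameleon import PageTemplate

    # Toy catalogue, keyed by language code (invented for the example).
    CATALOGUE = {
        "de": {
            "You have ${amount} dollars in your account.":
                "Sie haben ${amount} Dollar auf Ihrem Konto.",
        },
    }

    def translate(msgid, domain=None, mapping=None, context=None,
                  target_language=None, default=None):
        # Assumed zope.i18n-style hook: look up the msgid, fall back to
        # the default text, then substitute the i18n:name mapping values.
        text = CATALOGUE.get(target_language, {}).get(msgid, default or msgid)
        for name, value in (mapping or {}).items():
            text = text.replace("${%s}" % name, "%s" % value)
        return text

    template = PageTemplate(
        '<div i18n:translate="">You have '
        '<span i18n:name="amount">${round(amount, 2)}</span> '
        'dollars in your account.</div>',
        translate=translate,   # assumed keyword; check the API reference
    )
    print(template(amount=10.0, target_language="de"))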
diff --git a/lib3/Chameleon-2.9.2/docs/library.rst b/lib3/Chameleon-2.9.2/docs/library.rst new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/docs/library.rst @@ -0,0 +1,238 @@ +Library Documentation +===================== + +This section documents the package as a Python library. To learn about +the page template language, consult the :ref:`language reference +`. + +.. _getting-started-with-cpt: + +Getting started +--------------- + +There are several template constructor classes available, one for each +of the combinations *text* or *xml*, and *string* or *file*. + +The file-based constructor requires an absolute path. To set up a +templates directory *once*, use the template loader class:: + + import os + + path = os.path.dirname(__file__) + + from chameleon import PageTemplateLoader + templates = PageTemplateLoader(os.path.join(path, "templates")) + +Then, to load a template relative to the provided path, use dictionary +syntax:: + + template = templates['hello.pt'] + +Alternatively, use the appropriate template class directly. Let's try +with a string input:: + + from chameleon import PageTemplate + template = PageTemplate("
<div>Hello, ${name}.</div>
") + +All template instances are callable. Provide variables by keyword +argument:: + + >>> template(name='John') + '
<div>Hello, John.</div>
' + +.. _fast: + +Performance +----------- + +The template engine compiles (or *translates*) template source code +into Python byte-code. In simple templates this yields an increase in +performance of about 7 times in comparison to the reference +implementation. + +In benchmarks for the content management system `Plone +`_, switching to Chameleon yields a request to +response improvement of 20-50%. + +Extension +--------- + +You can extend the language through the expression engine by writing +your own expression compiler. + +Let's try and write an expression compiler for an expression type that +will simply uppercase the supplied value. We'll call it ``upper``. + +You can write such a compiler as a closure: + +.. code-block:: python + + import ast + + def uppercase_expression(string): + def compiler(target, engine): + uppercased = self.string.uppercase() + value = ast.Str(uppercased) + return [ast.Assign(targets=[target], value=value)] + return compiler + +To make it available under a certain prefix, we'll add it to the +expression types dictionary. + +.. code-block:: python + + from chameleon import PageTemplate + PageTemplate.expression_types['upper'] = uppercase_expression + +Alternatively, you could subclass the template class and set the +attribute ``expression_types`` to a dictionary that includes your +expression: + +.. code-block:: python + + from chameleon import PageTemplateFile + from chameleon.tales import PythonExpr + + class MyPageTemplateFile(PageTemplateFile): + expression_types = { + 'python': PythonExpr, + 'upper': uppercase_expression + } + +You can now uppercase strings *natively* in your templates:: + +
+
+  <div tal:content="upper: hello, world" />
+
+It's probably best to stick with a Python expression::
+
+  <div tal:content="'hello, world'.upper()" />
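For reference, here is a self-contained variant of the ``upper`` compiler sketched in the Extension section above. The closure shown there refers to ``self.string.uppercase()``, which would fail at run time; the version below assumes the intent was simply to uppercase the text following the ``upper:`` prefix, and the output noted in the comment may differ in whitespace::

    import ast

    from chameleon import PageTemplate

    def uppercase_expression(string):
        # ``string`` is the raw text following the ``upper:`` prefix.
        def compiler(target, engine):
            value = ast.Str(string.strip().upper())
            # Return a list of AST statements assigning to the target node.
            return [ast.Assign(targets=[target], value=value)]
        return compiler

    PageTemplate.expression_types['upper'] = uppercase_expression

    template = PageTemplate('<div tal:content="upper: hello, world" />')
    print(template())   # expected: <div>HELLO, WORLD</div>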
+ + +.. _whats-new: + +Changes between 1.x and 2.x +--------------------------- + +This sections describes new features, improvements and changes from +1.x to 2.x. + +New parser +~~~~~~~~~~ + +This series features a new, custom-built parser, implemented in pure +Python. It parses both HTML and XML inputs (the previous parser relied +on the expat system library and was more strict about its input). + +The main benefit of the new parser is that the compiler is now able to +point to the source location of parse- and compilation errors much +more accurately. This should be a great aid in debugging these errors. + +Compatible output +~~~~~~~~~~~~~~~~~ + +The 2.x engine matches the output of the reference implementation more +closely (usually exactly). There are less differences altogether; for +instance, the method of escaping TALES expression (usually a +semicolon) has been changed to match that of the reference +implementation. + +New language features +~~~~~~~~~~~~~~~~~~~~~ + +This series also introduces a number of new language features: + +1. Support for the ``tal:on-error`` from the reference specification + has been added. + +2. Two new attributes ``tal:switch`` and ``tal:case`` have been added + to make element conditions more flexible. + + +Code improvements +~~~~~~~~~~~~~~~~~ + +The template classes have been refactored and simplified allowing +better reuse of code and more intuitive APIs on the lower levels. + +Expression engine +~~~~~~~~~~~~~~~~~ + +The expression engine has been redesigned to make it easier to +understand and extend. The new engine is based on the ``ast`` module +(available since Python 2.6; backports included for Python 2.5). This +means that expression compilers now need to return a valid list of AST +statements that include an assignment to the target node. + +Compiler +~~~~~~~~ + +The new compiler has been optimized for complex templates. As a +result, in the benchmark suite included with the package, this +compiler scores about half of the 1.x series. For most real world +applications, the engine should still perform as well as the 1.x +series. + + +API reference +------------- + +This section describes the documented API of the library. + +Template classes +~~~~~~~~~~~~~~~~ + +Use the ``PageTemplate*`` template classes to define a template from a +string or file input: + +.. automodule:: chameleon + + .. autoclass:: chameleon.PageTemplate + + Note: The remaining classes take the same general configuration + arguments. + + .. automethod:: render + + .. autoclass:: chameleon.PageTemplateFile(filename, **config) + + .. autoclass:: chameleon.PageTextTemplate + + .. autoclass:: chameleon.PageTextTemplateFile + +Template loader +~~~~~~~~~~~~~~~ + +Some systems have framework support for loading templates from +files. The following loader class is directly compatible with the +Pylons framework and may be adapted to other frameworks: + +.. class:: chameleon.PageTemplateLoader(search_path=None, default_extension=None, **config) + + Load templates from ``search_path`` (must be a string or a list of + strings):: + + templates = PageTemplateLoader(path) + example = templates['example.pt'] + + If ``default_extension`` is provided, this will be added to inputs + that do not already have an extension:: + + templates = PageTemplateLoader(path, ".pt") + example = templates['example'] + + Any additional keyword arguments will be passed to the template + constructor:: + + templates = PageTemplateLoader(path, debug=True, encoding="utf-8") + + .. 
automethod:: load + +Expression engine +~~~~~~~~~~~~~~~~~ + +For advanced integration, the compiler module provides support for +dynamic expression evaluation: + +.. automodule:: chameleon.compiler + + .. autoclass:: chameleon.compiler.ExpressionEvaluator diff --git a/lib3/Chameleon-2.9.2/docs/reference.rst b/lib3/Chameleon-2.9.2/docs/reference.rst new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/docs/reference.rst @@ -0,0 +1,1695 @@ +:tocdepth: 4 + +.. _language-reference: + +.. highlight:: xml + +Language Reference +================== + +The language reference is structured such that it can be read as a +general introduction to the *page templates* language. + +It's split into parts that correspond to each of the main language +features. + +Syntax +###### + +You can safely :ref:`skip this section ` if you're familiar with +how template languages work or just want to learn by example. + +An *attribute language* is a programming language designed to render +documents written in XML or HTML markup. The input must be a +well-formed document. The output from the template is usually +XML-like but isn't required to be well-formed. + +The statements of the language are document tags with special +attributes, and look like this:: + +

+  <p namespace-prefix:command="argument"> ... </p>

+ +In the above example, the attribute +``namespace-prefix:command="argument"`` is the statement, and the +entire paragraph tag is the statement's element. The statement's +element is the portion of the document on which this statement +operates. + +The namespace prefixes are typically declared once, at the top of a +template (note that prefix declarations for the template language +namespaces are omitted from the template output):: + + + ... + + +Thankfully, sane namespace prefix defaults are in place to let us skip +most of the boilerplate:: + + + +

+    <p tal:command="argument"> ... </p>
+
+Note how ``tal`` is used without an explicit namespace
+declaration. Chameleon sets up defaults for ``metal`` and ``i18n`` as
+well.
+
+.. note:: Default prefixes are a special feature of Chameleon.
+
+.. _tal:
+
+Basics (TAL)
+############
+
+The *template attribute language* is used to create dynamic XML-like
+content. It allows elements of a document to be replaced, repeated,
+or omitted.
+
+Statements
+----------
+
+These are the available statements:
+
+================== ==============
+ Statement          Description
+================== ==============
+``tal:define``     Define variables.
+``tal:switch``     Define a switch condition.
+``tal:condition``  Include element only if expression is true.
+``tal:repeat``     Repeat an element.
+``tal:case``       Include element only if expression is equal to parent switch.
+``tal:content``    Substitute the content of an element.
+``tal:replace``    Replace the element with dynamic content.
+``tal:omit-tag``   Omit the element tags, leaving only the inner content.
+``tal:attributes`` Dynamically change or insert element attributes.
+``tal:on-error``   Substitute the content of an element if processing fails.
+================== ==============
+
+When there is only one TAL statement per element, the order in which
+they are executed is simple. Starting with the root element, each
+element's statements are executed, then each of its child elements is
+visited, in order, to do the same::
+
+  <html>
+    <meta>
+      <title tal:content="context.title" />
+    </meta>
+    <body>
+      <div tal:condition="items">
+        <p>These are your items:</p>
+        <ul>
+          <li tal:repeat="item items" tal:content="item" />
+        </ul>
+      </div>
+    </body>
+  </html>
+
+Any combination of statements may appear on the same element, except
+that the ``tal:content`` and ``tal:replace`` statements may not be
+used on the same element.
+
+.. note:: The ``tal:case`` and ``tal:switch`` statements are available
+          in Chameleon only.
+
+TAL does not use the order in which statements are written in the
+tag to determine the order in which they are executed. When an
+element has multiple statements, they are executed in the order
+printed in the table above.
+
+There is a reason behind this ordering. Because users often want
+to set up variables for use in other statements contained within this
+element or subelements, ``tal:define`` is executed first, followed by
+any switch statement. ``tal:condition`` follows, then ``tal:repeat``,
+then ``tal:case``. We are now rendering an element; first ``tal:content``
+or ``tal:replace``. Finally, before ``tal:attributes``, we have
+``tal:omit-tag`` (which is implied with ``tal:replace``).
+
+.. note:: *TALES* is used as the expression language for the "stuff in
+          the quotes". The default syntax is simply Python, but
+          other inputs are possible --- see the section on :ref:`expressions
+          <tales>`.
+
+``tal:attributes``
+^^^^^^^^^^^^^^^^^^
+
+Updates or inserts element attributes.
+
+::
+
+  tal:attributes="href request.url"
+
+Syntax
+~~~~~~
+
+``tal:attributes`` syntax::
+
+  argument             ::= attribute_statement [';' attribute_statement]*
+  attribute_statement  ::= attribute_name expression
+  attribute_name       ::= [namespace-prefix ':'] Name
+  namespace-prefix     ::= Name
+
+
+Description
+~~~~~~~~~~~
+
+The ``tal:attributes`` statement replaces the value of an attribute
+(or creates an attribute) with a dynamic value. The
+value of each expression is converted to a string, if necessary.
+
+.. note:: You can qualify an attribute name with a namespace prefix,
+          for example ``html:table``, if you are generating an XML document
+          with multiple namespaces.
+ +If an attribute expression evaluates to ``None``, the attribute is +deleted from the statement element (or simply not inserted). + +If the expression evaluates to the symbol ``default`` (a symbol which +is always available when evaluating attributes), its value is defined +as the default static attribute value. If there is no such default +value, a return value of ``default`` will drop the attribute. + +If you use ``tal:attributes`` on an element with an active +``tal:replace`` command, the ``tal:attributes`` statement is ignored. + +If you use ``tal:attributes`` on an element with a ``tal:repeat`` +statement, the replacement is made on each repetition of the element, +and the replacement expression is evaluated fresh for each repetition. + +.. note:: If you want to include a semicolon (";") in an expression, it + must be escaped by doubling it (";;") [1]_. + +Examples +~~~~~~~~ + +Replacing a link:: + + <a href="/sample/link.html" + tal:attributes="href context.url()" + > + ... + </a> + +Replacing two attributes:: + + <textarea rows="80" cols="20" + tal:attributes="rows request.rows();cols request.cols()" + /> + +A checkbox input:: + + <input type="input" tal:attributes="checked True" /> + +``tal:condition`` +^^^^^^^^^^^^^^^^^ + +Conditionally includes or omits an element:: + + <div tal:condition="comments"> + ... + </div> + +Syntax +~~~~~~ + +``tal:condition`` syntax:: + + argument ::= expression + +Description +~~~~~~~~~~~ + + The ``tal:condition`` statement includes the statement element in the + template only if the condition is met, and omits it otherwise. If + its expression evaluates to a *true* value, then normal processing of + the element continues, otherwise the statement element is immediately + removed from the template. For these purposes, the value ``nothing`` + is false, and ``default`` has the same effect as returning a true + value. + +.. note:: Like Python itself, ZPT considers None, zero, empty strings, + empty sequences, empty dictionaries, and instances which return a + nonzero value from ``__len__`` or ``__nonzero__`` false; all other + values are true, including ``default``. + +Examples +~~~~~~~~ + +Test a variable before inserting it:: + + <p tal:condition="request.message" tal:content="request.message" /> + +Testing for odd/even in a repeat-loop:: + + <div tal:repeat="item range(10)"> + <p tal:condition="repeat.item.even">Even</p> + <p tal:condition="repeat.item.odd">Odd</p> + </div> + +``tal:content`` +^^^^^^^^^^^^^^^ + +Replaces the content of an element. + +Syntax +~~~~~~ + +``tal:content`` syntax:: + + argument ::= (['text'] | 'structure') expression + +Description +~~~~~~~~~~~ + +Rather than replacing an entire element, you can insert text or +structure in place of its children with the ``tal:content`` statement. +The statement argument is exactly like that of ``tal:replace``, and is +interpreted in the same fashion. If the expression evaluates to +``nothing``, the statement element is left childless. If the +expression evaluates to ``default``, then the element's contents are +evaluated. + +The default replacement behavior is ``text``, which replaces +angle-brackets and ampersands with their HTML entity equivalents. The +``structure`` keyword passes the replacement text through unchanged, +allowing HTML/XML markup to be inserted. This can break your page if +the text contains unanticipated markup (eg. text submitted via a web +form), which is the reason that it is not the default. + +.. 
note:: The ``structure`` keyword exists to provide backwards
+          compatibility. In Chameleon, the ``structure:`` expression
+          type provides the same functionality (also for inline
+          expressions).
+
+
+Examples
+~~~~~~~~
+
+Inserting the user name::
+
+  <p tal:content="user.getUserName()">Fred Farkas</p>
+
+Inserting HTML/XML::
+
+  <p tal:content="structure context.getStory()">
+    Marked <b>up</b> content goes here.
+  </p>
+
+``tal:define``
+^^^^^^^^^^^^^^
+
+Defines local variables.
+
+Syntax
+~~~~~~
+
+``tal:define`` syntax::
+
+  argument       ::= define_scope [';' define_scope]*
+  define_scope   ::= (['local'] | 'global') define_var
+  define_var     ::= variable_name expression
+  variable_name  ::= Name
+
+Description
+~~~~~~~~~~~
+
+The ``tal:define`` statement defines variables. When you define a
+local variable in a statement element, you can use that variable in
+that element and the elements it contains. If you redefine a variable
+in a contained element, the new definition hides the outer element's
+definition within the inner element.
+
+Note that valid variable names are any Python identifier string
+including underscore, although two or more leading underscores are
+disallowed (used internally by the compiler). Further, names are
+case-sensitive.
+
+Python builtins are always "in scope", but most of them may be
+redefined (such as ``help``). Exceptions are: ``float``, ``int``,
+``len``, ``long``, ``str``, ``None``, ``True`` and ``False``.
+
+In addition, the following names are reserved: ``econtext``,
+``rcontext``, ``translate``, ``decode`` and ``convert``.
+
+If the expression associated with a variable evaluates to ``nothing``,
+then that variable has the value ``nothing``, and may be used as such
+in further expressions. Likewise, if the expression evaluates to
+``default``, then the variable has the value ``default``, and may be
+used as such in further expressions.
+
+You can define two different kinds of variables: *local* and
+*global*. When you define a local variable in a statement element, you
+can only use that variable in that element and the elements it
+contains. If you redefine a local variable in a contained element, the
+new definition hides the outer element's definition within the inner
+element. When you define a global variable, you can use it in any
+element processed after the defining element. If you redefine a global
+variable, you replace its definition for the rest of the template.
+
+To set the definition scope of a variable, use the keywords ``local``
+or ``global`` in front of the assignment. The default setting is
+``local``; thus, in practice, only the ``global`` keyword is used.
+
+.. note:: If you want to include a semicolon (";") in an expression, it
+          must be escaped by doubling it (";;") [1]_.
+
+Examples
+~~~~~~~~
+
+Defining a variable::
+
+  tal:define="company_name 'Zope Corp, Inc.'"
+
+Defining two variables, where the second depends on the first::
+
+  tal:define="mytitle context.title; tlen len(mytitle)"
+
+
+``tal:switch`` and ``tal:case``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Defines a switch clause.
+
+::
+
+  <ul tal:switch="len(items) % 2">
+    <li tal:case="True">odd</li>
+    <li tal:case="False">even</li>
+  </ul>
+
+Syntax
+~~~~~~
+
+``tal:case`` and ``tal:switch`` syntax::
+
+  argument ::= expression
+
+Description
+~~~~~~~~~~~
+
+The *switch* and *case* construct is a short-hand syntax for
+evaluating a set of expressions against a parent value.
+ +The ``tal:switch`` statement is used to set a new parent value and the +``tal:case`` statement works like a condition and only allows content +if the expression matches the value. + +Note that if the case expression is the symbol ``default``, it always +matches the switch. + +.. note:: These statements are only available in Chameleon 2.x and not + part of the ZPT specification. + +Examples +~~~~~~~~ + +:: + + <ul tal:switch="item.type"> + <li tal:case="'document'"> + Document + </li> + <li tal:case="'folder'"> + Folder + </li> + </ul> + +Note that any and all cases that match the switch will be included. + + +``tal:omit-tag`` +^^^^^^^^^^^^^^^^ + +Removes an element, leaving its contents. + +Syntax +~~~~~~ + +``tal:omit-tag`` syntax:: + + argument ::= [ expression ] + +Description +~~~~~~~~~~~ + +The ``tal:omit-tag`` statement leaves the contents of an element in +place while omitting the surrounding start and end tags. + +If the expression evaluates to a *false* value, then normal processing +of the element continues and the tags are not omitted. If the +expression evaluates to a *true* value, or no expression is provided, +the statement element is replaced with its contents. + +.. note:: Like Python itself, ZPT considers None, zero, empty strings, + empty sequences, empty dictionaries, and instances which return a + nonzero value from ``__len__`` or ``__nonzero__`` false; all other + values are true, including ``default``. + +Examples +~~~~~~~~ + +Unconditionally omitting a tag:: + + <div tal:omit-tag="" comment="This tag will be removed"> + <i>...but this text will remain.</i> + </div> + +Conditionally omitting a tag:: + + <b tal:omit-tag="not:bold">I may be bold.</b> + +The above example will omit the ``b`` tag if the variable ``bold`` is false. + +Creating ten paragraph tags, with no enclosing tag:: + + <span tal:repeat="n range(10)" + tal:omit-tag=""> + <p tal:content="n">1</p> + </span> + +.. _tal_repeat: + +``tal:repeat`` +^^^^^^^^^^^^^^ + +Repeats an element. + +Syntax +~~~~~~ + +``tal:repeat`` syntax:: + + argument ::= variable_name expression + variable_name ::= Name + +Description +~~~~~~~~~~~ + +The ``tal:repeat`` statement replicates a sub-tree of your document +once for each item in a sequence. The expression should evaluate to a +sequence. If the sequence is empty, then the statement element is +deleted, otherwise it is repeated for each value in the sequence. If +the expression is ``default``, then the element is left unchanged, and +no new variables are defined. + +The ``variable_name`` is used to define a local variable and a repeat +variable. For each repetition, the local variable is set to the +current sequence element, and the repeat variable is set to an +iteration object. + +Repeat variables +~~~~~~~~~~~~~~~~~ + +You use repeat variables to access information about the current +repetition (such as the repeat index). The repeat variable has the +same name as the local variable, but is only accessible through the +built-in variable named ``repeat``. + +The following information is available from the repeat variable: + +================== ============== + Attribute Description +================== ============== +``index`` Repetition number, starting from zero. +``number`` Repetition number, starting from one. +``even`` True for even-indexed repetitions (0, 2, 4, ...). +``odd`` True for odd-indexed repetitions (1, 3, 5, ...). +``start`` True for the starting repetition (index 0). +``end`` True for the ending, or final, repetition. 
+``first`` True for the first item in a group - see note below +``last`` True for the last item in a group - see note below +``length`` Length of the sequence, which will be the total number of repetitions. +``letter`` Repetition number as a lower-case letter: "a" - "z", "aa" - "az", "ba" - "bz", ..., "za" - "zz", "aaa" - "aaz", and so forth. +``Letter`` Upper-case version of *letter*. +``roman`` Repetition number as a lower-case roman numeral: "i", "ii", "iii", "iv", "v", etc. +``Roman`` Upper-case version of *roman*. +================== ============== + +You can access the contents of the repeat variable using either +dictionary- or attribute-style access, e.g. ``repeat['item'].start`` +or ``repeat.item.start``. + +.. note:: For legacy compatibility, the attributes ``odd``, ``even``, ``number``, ``letter``, ``Letter``, ``roman``, and ``Roman`` are callable (returning ``self``). + +Note that ``first`` and ``last`` are intended for use with sorted +sequences. They try to divide the sequence into group of items with +the same value. + +Examples +~~~~~~~~ + +Iterating over a sequence of strings:: + + <p tal:repeat="txt ('one', 'two', 'three')"> + <span tal:replace="txt" /> + </p> + +Inserting a sequence of table rows, and using the repeat variable +to number the rows:: + + <table> + <tr tal:repeat="item here.cart"> + <td tal:content="repeat.item.number">1</td> + <td tal:content="item.description">Widget</td> + <td tal:content="item.price">$1.50</td> + </tr> + </table> + +Nested repeats:: + + <table border="1"> + <tr tal:repeat="row range(10)"> + <td tal:repeat="column range(10)"> + <span tal:define="x repeat.row.number; + y repeat.column.number; + z x * y" + tal:replace="string:$x * $y = $z">1 * 1 = 1</span> + </td> + </tr> + </table> + +Insert objects. Separates groups of objects by type by drawing a rule +between them:: + + <div tal:repeat="object objects"> + <h2 tal:condition="repeat.object.first.meta_type" + tal:content="object.type">Meta Type</h2> + <p tal:content="object.id">Object ID</p> + <hr tal:condition="object.last.meta_type" /> + </div> + +.. note:: the objects in the above example should already be sorted by + type. + +``tal:replace`` +^^^^^^^^^^^^^^^ + +Replaces an element. + +Syntax +~~~~~~ + +``tal:replace`` syntax:: + + argument ::= ['structure'] expression + +Description +~~~~~~~~~~~ + + +The ``tal:replace`` statement replaces an element with dynamic +content. It replaces the statement element with either text or a +structure (unescaped markup). The body of the statement is an +expression with an optional type prefix. The value of the expression +is converted into an escaped string unless you provide the 'structure' prefix. Escaping consists of converting ``&`` to +``&amp;``, ``<`` to ``&lt;``, and ``>`` to ``&gt;``. + +.. note:: If the inserted object provides an ``__html__`` method, that method is called with the result inserted as structure. This feature is not implemented by ZPT. + +If the expression evaluates to ``None``, the element is simply removed. If the value is ``default``, then the element is left unchanged. + +Examples +~~~~~~~~ + +Inserting a title:: + + <span tal:replace="context.title">Title</span> + +Inserting HTML/XML:: + + <div tal:replace="structure table" /> + +.. _tales: + +Expressions (TALES) +################### + +The *Template Attribute Language Expression Syntax* (TALES) standard +describes expressions that supply :ref:`tal` and +:ref:`metal` with data. 
TALES is *one* possible expression +syntax for these languages, but they are not bound to this definition. +Similarly, TALES could be used in a context having nothing to do with +TAL or METAL. + +TALES expressions are described below with any delimiter or quote +markup from higher language layers removed. Here is the basic +definition of TALES syntax:: + + Expression ::= [type_prefix ':'] String + type_prefix ::= Name + +Here are some simple examples:: + + 1 + 2 + None + string:Hello, ${view.user_name} + +The optional *type prefix* determines the semantics and syntax of the +*expression string* that follows it. A given implementation of TALES +can define any number of expression types, with whatever syntax you +like. It also determines which expression type is indicated by +omitting the prefix. + +Types +----- + +These are the available TALES expression types: + +============= ============== + Prefix Description +============= ============== +``exists`` Evaluate the result inside an exception handler; if one of the exceptions ``AttributeError``, ``LookupError``, ``TypeError``, ``NameError``, or ``KeyError`` is raised during evaluation, the result is ``False``, otherwise ``True``. Note that the original result is discarded in any case. +``import`` Import a global symbol using dotted notation. +``load`` Load a template relative to the current template or absolute. +``not`` Negate the expression result +``python`` Evaluate a Python expression +``string`` Format a string +``structure`` Wraps the expression result as *structure*. +============= ============== + +.. note:: The default expression type is ``python``. + +.. warning:: The Zope reference engine defaults to a ``path`` + expression type, which is closely tied to the Zope + framework. This expression is not implemented in + Chameleon (but it's available in a Zope framework + compatibility package). + +There's a mechanism to allow fallback to alternative expressions, if +one should fail (raise an exception). The pipe character ('|') is used +to separate two expressions:: + + <div tal:define="page request.GET['page'] | 0"> + +This mechanism applies only to the ``python`` expression type, and by +derivation ``string``. + +.. _tales_built_in_names: + +``python`` +^^^^^^^^^^ + +Evaluates a Python expression. + +Syntax +~~~~~~ + +Python expression syntax:: + + Any valid Python language expression + +Description +~~~~~~~~~~~ + +Python expressions are executed natively within the translated +template source code. There is no built-in security apparatus. + +``string`` +^^^^^^^^^^ + +Syntax +~~~~~~ + +String expression syntax:: + + string_expression ::= ( plain_string | [ varsub ] )* + varsub ::= ( '$' Variable ) | ( '${ Expression }' ) + plain_string ::= ( '$$' | non_dollar )* + non_dollar ::= any character except '$' + +Description +~~~~~~~~~~~ + +String expressions interpret the expression string as text. If no +expression string is supplied the resulting string is *empty*. The +string can contain variable substitutions of the form ``$name`` or +``${expression}``, where ``name`` is a variable name, and ``expression`` is a TALES-expression. The escaped string value of the expression is inserted into the string. + +.. note:: To prevent a ``$`` from being interpreted this + way, it must be escaped as ``$$``. 
+ +Examples +~~~~~~~~ + +Basic string formatting:: + + <span tal:replace="string:$this and $that"> + Spam and Eggs + </span> + + <p tal:content="string:${request.form['total']}"> + total: 12 + </p> + +Including a dollar sign:: + + <p tal:content="string:$$$cost"> + cost: $42.00 + </p> + +.. _import-expression: + +``import`` +^^^^^^^^^^ + +Imports a module global. + +.. _structure-expression: + +``structure`` +^^^^^^^^^^^^^ + +Wraps the expression result as *structure*: The replacement text is +inserted into the document without escaping, allowing HTML/XML markup +to be inserted. This can break your page if the text contains +unanticipated markup (eg. text submitted via a web form), which is +the reason that it is not the default. + +.. _load-expression: + +``load`` +^^^^^^^^ + +Loads a template instance. + +Syntax +~~~~~~ + +Load expression syntax:: + + Relative or absolute file path + +Description +~~~~~~~~~~~ + +The template will be loaded using the same template class as the +calling template. + +Examples +~~~~~~~~ + +Loading a template and using it as a macro:: + + <div tal:define="master load: ../master.pt" metal:use-macro="master" /> + + +Built-in names +-------------- + +These are the names always available in the TALES expression namespace: + +- ``default`` - special value used to specify that existing text or attributes should not be replaced. See the documentation for individual TAL statements for details on how they interpret *default*. + +- ``repeat`` - the *repeat* variables; see :ref:`tal_repeat` for more + information. + +- ``template`` - reference to the template which was first called; this symbol is carried over when using macros. + +- ``macros`` - reference to the macros dictionary that corresponds to the current template. + + +.. _metal: + +Macros (METAL) +############## + +The *Macro Expansion Template Attribute Language* (METAL) standard is +a facility for HTML/XML macro preprocessing. It can be used in +conjunction with or independently of TAL and TALES. + +Macros provide a way to define a chunk of presentation in one +template, and share it in others, so that changes to the macro are +immediately reflected in all of the places that share it. +Additionally, macros are always fully expanded, even in a template's +source text, so that the template appears very similar to its final +rendering. + +A single Page Template can accomodate multiple macros. + +Namespace +--------- + +The METAL namespace URI and recommended alias are currently defined +as:: + + xmlns:metal="http://xml.zope.org/namespaces/metal" + +Just like the TAL namespace URI, this URI is not attached to a web +page; it's just a unique identifier. This identifier must be used in +all templates which use METAL. + +Statements +---------- + +METAL defines a number of statements: + +* ``metal:define-macro`` Define a macro. +* ``metal:use-macro`` Use a macro. +* ``metal:extend-macro`` Extend a macro. +* ``metal:define-slot`` Define a macro customization point. +* ``metal:fill-slot`` Customize a macro. + +Although METAL does not define the syntax of expression non-terminals, +leaving that up to the implementation, a canonical expression syntax +for use in METAL arguments is described in TALES Specification. + +``define-macro`` +^^^^^^^^^^^^^^^^ + +Defines a macro. + +Syntax +~~~~~~ + +``metal:define-macro`` syntax:: + + argument ::= Name + +Description +~~~~~~~~~~~ + +The ``metal:define-macro`` statement defines a macro. 
The macro is named +by the statement expression, and is defined as the element and its +sub-tree. + +Examples +~~~~~~~~ + +Simple macro definition:: + + <p metal:define-macro="copyright"> + Copyright 2011, <em>Foobar</em> Inc. + </p> + +``define-slot`` +^^^^^^^^^^^^^^^ + +Defines a macro customization point. + +Syntax +~~~~~~ + +``metal:define-slot`` syntax:: + + argument ::= Name + +Description +~~~~~~~~~~~ + +The ``metal:define-slot`` statement defines a macro customization +point or *slot*. When a macro is used, its slots can be replaced, in +order to customize the macro. Slot definitions provide default content +for the slot. You will get the default slot contents if you decide not +to customize the macro when using it. + +The ``metal:define-slot`` statement must be used inside a +``metal:define-macro`` statement. + +Slot names must be unique within a macro. + +Examples +~~~~~~~~ + +Simple macro with slot:: + + <p metal:define-macro="hello"> + Hello <b metal:define-slot="name">World</b> + </p> + +This example defines a macro with one slot named ``name``. When you use +this macro you can customize the ``b`` element by filling the ``name`` +slot. + +``fill-slot`` +^^^^^^^^^^^^^ + +Customize a macro. + +Syntax +~~~~~~ + +``metal:fill-slot`` syntax:: + + argument ::= Name + +Description +~~~~~~~~~~~ + +The ``metal:fill-slot`` statement customizes a macro by replacing a +*slot* in the macro with the statement element (and its content). + +The ``metal:fill-slot`` statement must be used inside a +``metal:use-macro`` statement. + +Slot names must be unique within a macro. + +If the named slot does not exist within the macro, the slot +contents will be silently dropped. + +Examples +~~~~~~~~ + +Given this macro:: + + <p metal:define-macro="hello"> + Hello <b metal:define-slot="name">World</b> + </p> + +You can fill the ``name`` slot like so:: + + <p metal:use-macro="container['master.html'].macros.hello"> + Hello <b metal:fill-slot="name">Kevin Bacon</b> + </p> + +``use-macro`` +^^^^^^^^^^^^^ + +Use a macro. + +Syntax +~~~~~~ + +``metal:use-macro`` syntax:: + + argument ::= expression + +Description +~~~~~~~~~~~ + +The ``metal:use-macro`` statement replaces the statement element with +a macro. The statement expression describes a macro definition. + +.. note:: In Chameleon the expression may point to a template instance; in this case it will be rendered in its entirety. + +``extend-macro`` +^^^^^^^^^^^^^^^^ + +Extends a macro. + +Syntax +~~~~~~ + +``metal:extend-macro`` syntax:: + + argument ::= expression + +Description +~~~~~~~~~~~ + +To extend an existing macro, choose a name for the macro and add a +define-macro attribute to a document element with the name as the +argument. Add an extend-macro attribute to the document element with +an expression referencing the base macro as the argument. The +extend-macro must be used in conjunction with define-macro, and must +not be used with use-macro. The element's subtree is the macro +body. + +Examples +~~~~~~~~ + +:: + + <div metal:define-macro="page-header" + metal:extend-macro="standard_macros['page-header']"> + <div metal:fill-slot="breadcrumbs"> + You are here: + <div metal:define-slot="breadcrumbs"/> + </div> + </div> + + +.. _i18n: + +Translation (I18N) +################## + +Translation of template contents and attributes is supported via the +``i18n`` namespace and message objects. 
+ +Messages +-------- + +The translation machinery defines a message as *any object* which is +not a string or a number and which does not provide an ``__html__`` +method. + +When any such object is inserted into the template, the translate +function is invoked first to see if it needs translation. The result +is always coerced to a native string before it's inserted into the +template. + +Translation function +-------------------- + +The simplest way to hook into the translation machinery is to provide +a translation function to the template constructor or at +render-time. In either case it should be passed as the keyword +argument ``translate``. + +The function has the following signature: + +.. code-block:: python + + def translate(msgid, domain=None, mapping=None, context=None, target_language=None, default=None): + ... + +The result should be a string or ``None``. If another type of object +is returned, it's automatically coerced into a string. + +If `zope.i18n <http://pypi.python.org/pypi/zope.i18n>`_ is available, +the translation machinery defaults to using its translation +function. Note that this function requires messages to conform to the +message class from `zope.i18nmessageid +<http://pypi.python.org/pypi/zope.i18nmessageid>`_; specifically, +messages must have attributes ``domain``, ``mapping`` and +``default``. Example use: + +.. code-block:: python + + from zope.i18nmessageid import MessageFactory + _ = MessageFactory("food") + + apple = _(u"Apple") + +There's currently no further support for other translation frameworks. + +Using Zope's translation framework +----------------------------------- + +The translation function from ``zope.i18n`` relies on *translation +domains* to provide translations. + +These are components that are registered for some translation domain +identifier and which implement a ``translate`` method that translates +messages for that domain. + +.. note:: To register translation domain components, the Zope Component Architecture must be used (see `zope.component <http://pypi.python.org/pypi/zope.component>`_). + +The easiest way to configure translation domains is to use the the +``registerTranslations`` ZCML-directive; this requires the use of the +`zope.configuration <http://pypi.python.org/pypi/zope.configuration>`_ +package. This will set up translation domains and gettext catalogs +automatically: + +.. code-block:: xml + + <configure xmlns="http://namespaces.zope.org/zope" + xmlns:i18n="http://xml.zope.org/namespaces/i18n"> + + <i18n:registerTranslations directory="locales" /> + + </configure> + +The ``./locales`` directory must follow a particular directory +structure: + +.. code-block:: bash + + ./locales/en/LC_MESSAGES + ./locales/de/LC_MESSAGES + ... + +In each of the ``LC_MESSAGES`` directories, one `GNU gettext +<http://en.wikipedia.org/wiki/GNU_gettext>`_ file in the ``.po`` +format must be present per translation domain: + +.. code-block:: po + + # ./locales/de/LC_MESSAGES/food.po + + msgid "" + msgstr "" + "MIME-Version: 1.0\n" + "Content-Type: text/plain; charset=UTF-8\n" + "Content-Transfer-Encoding: 8bit\n" + + msgid "Apple" + msgstr "Apfel" + +It may be necessary to compile the message catalog using the +``msgfmt`` utility. This will produce a ``.mo`` file. 
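+
+For example, assuming the directory layout shown above, the German
+catalog for the ``food`` domain could be compiled in place like this
+(the paths are only illustrative):
+
+.. code-block:: bash
+
+   msgfmt ./locales/de/LC_MESSAGES/food.po \
+       -o ./locales/de/LC_MESSAGES/food.mo
+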
+ +Translation domains without gettext +----------------------------------- + +The following example demonstrates how to manually set up and +configure a translation domain for which messages are provided +directly:: + + from zope import component + from zope.i18n.simpletranslationdomain import SimpleTranslationDomain + + food = SimpleTranslationDomain("food", { + ('de', u'Apple'): u'Apfel', + }) + + component.provideUtility(food, food.domain) + +An example of a custom translation domain class:: + + from zope import interface + + class TranslationDomain(object): + interface.implements(ITranslationDomain) + + def translate(self, msgid, mapping=None, context=None, + target_language=None, default=None): + + ... + + component.provideUtility(TranslationDomain(), name="custom") + +This approach can be used to integrate other translation catalog +implementations. + +.. highlight:: xml + +Namespace +--------- + +The ``i18n`` namespace URI and recommended prefix are currently +defined as:: + + xmlns:i18n="http://xml.zope.org/namespaces/i18n" + +This is not a URL, but merely a unique identifier. Do not expect a +browser to resolve it successfully. + +Statements +---------- + +The allowable ``i18n`` statements are: + +- ``i18n:translate`` +- ``i18n:domain`` +- ``i18n:source`` +- ``i18n:target`` +- ``i18n:name`` +- ``i18n:attributes`` +- ``i18n:data`` + +``i18n:translate`` +^^^^^^^^^^^^^^^^^^ + +This attribute is used to mark units of text for translation. If this +attribute is specified with an empty string as the value, the message +ID is computed from the content of the element bearing this attribute. +Otherwise, the value of the element gives the message ID. + +``i18n:domain`` +^^^^^^^^^^^^^^^ + +The ``i18n:domain`` attribute is used to specify the domain to be used +to get the translation. If not specified, the translation services +will use a default domain. The value of the attribute is used +directly; it is not a TALES expression. + +``i18n:source`` +^^^^^^^^^^^^^^^ + +The ``i18n:source`` attribute specifies the language of the text to be +translated. The default is ``nothing``, which means we don't provide +this information to the translation services. + + +``i18n:target`` +^^^^^^^^^^^^^^^ + +The ``i18n:target`` attribute specifies the language of the +translation we want to get. If the value is ``default``, the language +negotiation services will be used to choose the destination language. +If the value is ``nothing``, no translation will be performed; this +can be used to suppress translation within a larger translated unit. +Any other value must be a language code. + +The attribute value is a TALES expression; the result of evaluating +the expression is the language code or one of the reserved values. + +.. note:: ``i18n:target`` is primarily used for hints to text + extraction tools and translation teams. If you had some text that + should only be translated to e.g. German, then it probably + shouldn't be wrapped in an ``i18n:translate`` span. + +``i18n:name`` +^^^^^^^^^^^^^ + +Name the content of the current element for use in interpolation +within translated content. This allows a replaceable component in +content to be re-ordered by translation. For example:: + + <span i18n:translate=''> + <span tal:replace='context.name' i18n:name='name' /> was born in + <span tal:replace='context.country_of_birth' i18n:name='country' />. + </span> + +would cause this text to be passed to the translation service:: + + "${name} was born in ${country}." 
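+
+To see how this ties into the translation function described earlier,
+here is a rough, illustrative sketch (not part of the specification)
+of a ``translate`` implementation that applies the ``i18n:name``
+mapping; the sample values stand in for whatever ``context.name`` and
+``context.country_of_birth`` evaluate to:
+
+.. code-block:: python
+
+   def translate(msgid, domain=None, mapping=None, context=None,
+                 target_language=None, default=None):
+       # Illustrative only: a real implementation would first look up a
+       # translation for msgid, then substitute the ${...} placeholders
+       # from the i18n:name mapping into the translated string.
+       text = default or msgid
+       for name, value in (mapping or {}).items():
+           text = text.replace('${%s}' % name, str(value))
+       return text
+
+   # Rendering the example above results in a call roughly like:
+   translate("${name} was born in ${country}.",
+             mapping={'name': 'Alice', 'country': 'Norway'})
+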
+ +``i18n:attributes`` +^^^^^^^^^^^^^^^^^^^ + +This attribute will allow us to translate attributes of HTML tags, +such as the ``alt`` attribute in the ``img`` tag. The +``i18n:attributes`` attribute specifies a list of attributes to be +translated with optional message IDs for each; if multiple attribute +names are given, they must be separated by semicolons. Message IDs +used in this context must not include whitespace. + +Note that the value of the particular attributes come either from the +HTML attribute value itself or from the data inserted by +``tal:attributes``. + +If an attibute is to be both computed using ``tal:attributes`` and +translated, the translation service is passed the result of the TALES +expression for that attribute. + +An example:: + + <img src="http://foo.com/logo" alt="Visit us" + tal:attributes="alt context.greeting" + i18n:attributes="alt" + > + +In this example, we let ``tal:attributes`` set the value of the ``alt`` +attribute to the text "Stop by for a visit!". This text will be +passed to the translation service, which uses the result of language +negotiation to translate "Stop by for a visit!" into the requested +language. The example text in the template, "Visit us", will simply +be discarded. + +Another example, with explicit message IDs:: + + <img src="../icons/uparrow.png" alt="Up" + i18n:attributes="src up-arrow-icon; alt up-arrow-alttext" + > + +Here, the message ID ``up-arrow-icon`` will be used to generate the +link to an icon image file, and the message ID 'up-arrow-alttext' will +be used for the "alt" text. + +``i18n:data`` +^^^^^^^^^^^^^ + +Since TAL always returns strings, we need a way in ZPT to translate +objects, one of the most obvious cases being ``datetime`` objects. The +``data`` attribute will allow us to specify such an object, and +``i18n:translate`` will provide us with a legal format string for that +object. If ``data`` is used, ``i18n:translate`` must be used to give +an explicit message ID, rather than relying on a message ID computed +from the content. + +Relation with TAL processing +---------------------------- + +The attributes defined in the ``i18n`` namespace modify the behavior +of the TAL interpreter for the ``tal:attributes``, ``tal:content``, +``tal:repeat``, and ``tal:replace`` attributes, but otherwise do not +affect TAL processing. + +Since these attributes only affect TAL processing by causing +translations to occur at specific times, using these with a TAL +processor which does not support the ``i18n`` namespace degrades well; +the structural expectations for a template which uses the ``i18n`` +support is no different from those for a page which does not. The +only difference is that translations will not be performed in a legacy +processor. + +Relation with METAL processing +------------------------------- + +When using translation with METAL macros, the internationalization +context is considered part of the specific documents that page +components are retrieved from rather than part of the combined page. +This makes the internationalization context lexical rather than +dynamic, making it easier for a site builder to understand the +behavior of each element with respect to internationalization. 
+ +Let's look at an example to see what this means:: + + <html i18n:translate='' i18n:domain='EventsCalendar' + metal:use-macro="container['master.html'].macros.thismonth"> + + <div metal:fill-slot='additional-notes'> + <ol tal:condition="context.notes"> + <li tal:repeat="note context.notes"> + <tal:block tal:omit-tag="" + tal:condition="note.heading"> + <strong tal:content="note.heading"> + Note heading goes here + </strong> + <br /> + </tal:block> + <span tal:replace="note/description"> + Some longer explanation for the note goes here. + </span> + </li> + </ol> + </div> + + </html> + +And the macro source:: + + <html i18n:domain='CalendarService'> + <div tal:replace='python:DateTime().Month()' + i18n:translate=''>January</div> + + <!-- really hairy TAL code here ;-) --> + + <div define-slot="additional-notes"> + Place for the application to add additional notes if desired. + </div> + + </html> + +Note that the macro is using a different domain than the application +(which it should be). With lexical scoping, no special markup needs +to be applied to cause the slot-filler in the application to be part +of the same domain as the rest of the application's page components. +If dynamic scoping were used, the internationalization context would +need to be re-established in the slot-filler. + + +Extracting translatable message +------------------------------- + +Translators use `PO files +<http://www.gnu.org/software/hello/manual/gettext/PO-Files.html>`_ +when translating messages. To create and update PO files you need to +do two things: *extract* all messages from python and templates files +and store them in a ``.pot`` file, and for each language *update* its +``.po`` file. Chameleon facilitates this by providing extractors for +`Babel <http://babel.edgewall.org/>`_. To use this you need modify +``setup.py``. For example: + +.. code-block:: python + + from setuptools import setup + + setup(name="mypackage", + install_requires = [ + "Babel", + ], + message_extractors = { "src": [ + ("**.py", "chameleon_python", None ), + ("**.pt", "chameleon_xml", None ), + ]}, + ) + +This tells Babel to scan the ``src`` directory while using the +``chameleon_python`` extractor for all ``.py`` files and the +``chameleon_xml`` extractor for all ``.pt`` files. + +You can now use Babel to manage your PO files: + +.. code-block:: bash + + python setup.py extract_messages --output-file=i18n/mydomain.pot + python setup.py update_catalog \ + -l nl \ + -i i18n/mydomain.pot \ + -o i18n/nl/LC_MESSAGES/mydomain.po + python setup.py compile_catalog \ + --directory i18n --locale nl + +You can also configure default options in a ``setup.cfg`` file. For example:: + + [compile_catalog] + domain = mydomain + directory = i18n + + [extract_messages] + copyright_holder = Acme Inc. + output_file = i18n/mydomain.pot + charset = UTF-8 + + [init_catalog] + domain = mydomain + input_file = i18n/mydomain.pot + output_dir = i18n + + [update_catalog] + domain = mydomain + input_file = i18n/mydomain.pot + output_dir = i18n + previous = true + +You can now use the Babel commands directly:: + + python setup.py extract_messages + python setup.py update_catalog + python setup.py compile_catalog + + +${...} operator +############### + +The ``${...}`` notation is short-hand for text insertion. The +Python-expression inside the braces is evaluated and the result +included in the output (all inserted text is escaped by default): + +.. 
code-block:: html + + <div id="section-${index + 1}"> + ${content} + </div> + +To escape this behavior, prefix the notation with a backslash +character: ``\${...}``. + +Note that if an object implements the ``__html__`` method, the result +of this method will be inserted as-is (without XML escaping). + +Code blocks +########### + +The ``<?python ... ?>`` notation allows you to embed Python code in +templates: + +.. code-block:: html + + <div> + <?python numbers = map(str, range(1, 10)) ?> + Please input a number from the range ${", ".join(numbers)}. + </div> + +The scope of name assignments is up to the nearest macro definition, +or the template, if macros are not used. + +Note that code blocks can span multiple line and start on the next +line of where the processing instruction begins: + +.. code-block:: html + + <?python + foo = [1, 2, 3] + ?> + +You can use this to debug templates: + +.. code-block:: html + + <div> + <?python import pdb; pdb.set_trace() ?> + </div> + + +Markup comments +############### + +You can apply the "!" and "?" modifiers to change how comments are +processed: + +Drop + + ``<!--! This comment will be dropped from output -->`` + +Verbatim + + ``<!--? This comment will be included verbatim -->`` + + That is, evaluation of ``${...}`` expressions is disabled if the + comment opens with the "?" character. + + +.. _new-features: + +Language extensions +################### + +Chameleon extends the *page template* language with a new expression +types and language features. Some take inspiration from `Genshi +<http://genshi.edgewall.org/>`_. + + *New expression types* + + The :ref:`structure <structure-expression>` expression wraps an + expression result as *structure*:: + + <div>${structure: body.text}</div> + + The :ref:`import <import-expression>` expression imports module globals:: + + <div tal:define="compile import: re.compile"> + ... + </div> + + The :ref:`load <load-expression>` expression loads templates + relative to the current template:: + + <div tal:define="compile load: main.pt"> + ... + </div> + + *Tuple unpacking* + + The ``tal:define`` and ``tal:repeat`` statements support tuple + unpacking:: + + tal:define="(a, b, c) [1, 2, 3]" + + Extended `iterable unpacking + <http://www.python.org/dev/peps/pep-3132/>`_ using the asterisk + character is not currently supported (even for versions of + Python that support it natively). + + *Dictionary lookup as fallback after attribute error* + + If attribute lookup (using the ``obj.<name>`` syntax) raises an + ``AttributeError`` exception, a secondary lookup is attempted + using dictionary lookup --- ``obj['<name>']``. + + Behind the scenes, this is done by rewriting all + attribute-lookups to a custom lookup call: + + .. code-block:: python + + def lookup_attr(obj, key): + try: + return getattr(obj, key) + except AttributeError as exc: + try: + get = obj.__getitem__ + except AttributeError: + raise exc + try: + return get(key) + except KeyError: + raise exc + + *Inline string substitution* + + In element attributes and in the text or tail of an element, + string expression interpolation is available using the + ``${...}`` syntax:: + + <span class="content-${item_type}"> + ${title or item_id} + </span> + + *Code blocks* + + Using ``<?python ... ?>`` notation, you can embed Python + statements in your templates: + + .. code-block:: html + + <div> + <?python numbers = map(str, range(1, 10)) ?> + Please input a number from the range ${", ".join(numbers)}. 
+ </div> + + *Literal content* + + While the ``tal:content`` and ``tal:repeat`` attributes both + support the ``structure`` keyword which inserts the content as + a literal (without XML-escape), an object may also provide an + ``__html__`` method to the same effect. + + The result of the method will be inserted as *structure*. + + This is particularly useful for content which is substituted + using the expression operator: ``"${...}"`` since the + ``structure`` keyword is not allowed here. + + *Switch statement* + + Two new attributes have been added: ``tal:switch`` and + ``tal:case``. A case attribute works like a condition and only + allows content if the value matches that of the nearest parent + switch value. + + +Incompatibilities and differences +################################# + +There are a number of incompatibilities and differences between the +Chameleon language implementation and the Zope reference +implementation (ZPT): + + *Default expression* + + The default expression type is Python. + + *Template arguments* + + Arguments passed by keyword to the render- or call method are + inserted directly into the template execution namespace. This is + different from ZPT where these are only available through the + ``options`` dictionary. + + Zope:: + + <div tal:content="options/title" /> + + Chameleon:: + + <div tal:content="title" /> + + *Special symbols* + + The ``CONTEXTS`` symbol is not available. + +The `z3c.pt <http://pypi.python.org/pypi/z3c.pt>`_ package works as a +compatibility layer. The template classes in this package provide a +implementation which is fully compatible with ZPT. + +Notes +##### + +.. [1] This has been changed in 2.x. Previously, it was up to the + expression engine to parse the expression values including any + semicolons and since for instance Python-expressions can never + end in a semicolon, it was possible to clearly distinguish + between the different uses of the symbol, e.g. + + :: + + tal:define="text 'Hello world; goodbye world'" + + The semicolon appearing in the definition above is part of the + Python-expression simply because it makes the expression + valid. Meanwhile: + + :: + + tal:define="text1 'Hello world'; text2 'goodbye world'" + + The semicolon here must denote a second variable definition + because there is no valid Python-expression that includes it. + + While this behavior works well in practice, it is incompatible + with the reference specification, and also blurs the interface + between the compiler and the expression engine. 
In 2.x we + therefore have to escape the semicolon by doubling it (as + defined by the specification): + + :: + + tal:define="text 'Hello world;; goodbye world'" + diff --git a/lib3/Chameleon-2.9.2/setup.cfg b/lib3/Chameleon-2.9.2/setup.cfg new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/setup.cfg @@ -0,0 +1,14 @@ +[easy_install] +zip_ok = false + +[nosetests] +match = ^test +nocapture = 1 +cover-package = tree.codegen, tree.lexer, tree.parser, tree.nodes, tree.translation, tree.language, tree.tales, tree.expressions +cover-erase = 1 + +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + diff --git a/lib3/Chameleon-2.9.2/setup.py b/lib3/Chameleon-2.9.2/setup.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/setup.py @@ -0,0 +1,90 @@ +__version__ = '2.9.2' + +import os +import sys + +try: + from distribute_setup import use_setuptools + use_setuptools() +except: # doesn't work under tox/pip + pass + +from setuptools import setup, find_packages +from setuptools.command.test import test + +here = os.path.abspath(os.path.dirname(__file__)) +try: + README = open(os.path.join(here, 'README.rst')).read() + CHANGES = open(os.path.join(here, 'CHANGES.rst')).read() +except: # doesn't work under tox/pip + README = '' + CHANGES = '' + +install_requires = [] + +version = sys.version_info[:3] +if version < (2, 7, 0): + install_requires.append("ordereddict") + install_requires.append("unittest2") + + +class Benchmark(test): + description = "Run benchmarks" + user_options = [] + test_suite = None + + def initialize_options(self): + """init options""" + pass + + def finalize_options(self): + """finalize options""" + + self.distribution.tests_require = [ + 'zope.pagetemplate', + 'zope.component', + 'zope.i18n', + 'zope.testing'] + + def run(self): + test.run(self) + self.with_project_on_sys_path(self.run_benchmark) + + def run_benchmark(self): + from chameleon import benchmark + print("running benchmark...") + + benchmark.start() + +setup( + name="Chameleon", + version=__version__, + description="Fast HTML/XML Template Compiler.", + long_description="\n\n".join((README, CHANGES)), + classifiers=[ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 2.5", + "Programming Language :: Python :: 2.6", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.1", + "Programming Language :: Python :: 3.2", + ], + author="Malthe Borch", + author_email="mborch at gmail.com", + url="http://www.pagetemplates.org/", + license='BSD-like (http://repoze.org/license.html)', + packages=find_packages('src'), + package_dir = {'': 'src'}, + include_package_data=True, + install_requires=install_requires, + zip_safe=False, + test_suite="chameleon.tests", + cmdclass={ + 'benchmark': Benchmark, + } + ) + diff --git a/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/PKG-INFO b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/PKG-INFO new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/PKG-INFO @@ -0,0 +1,1122 @@ +Metadata-Version: 1.1 +Name: Chameleon +Version: 2.9.2 +Summary: Fast HTML/XML Template Compiler. 
+Home-page: http://www.pagetemplates.org/ +Author: Malthe Borch +Author-email: mborch at gmail.com +License: BSD-like (http://repoze.org/license.html) +Description: Overview + ======== + + Chameleon is an HTML/XML template engine for `Python + <http://www.python.org>`_. It uses the *page templates* language. + + You can use it in any Python web application with just about any + version of Python (2.5 and up, including 3.x and `pypy + <http://pypy.org>`_). + + Visit the `website <http://pagetemplates.org>`_ for more information + or the `documentation <http://pagetemplates.org/docs/latest/>`_. + + License and Copyright + --------------------- + + This software is made available as-is under a BSD-like license [1]_ + (see included copyright notice). + + + Notes + ----- + + .. [1] This software is licensed under the `Repoze + <http://repoze.org/license.html>`_ license. + + + Changes + ======= + + 2.9.2 (2012-06-06) + ------------------ + + Bugfixes: + + - Fixed a PyPy incompatibility. + + - Fixed issue #109 which caused testing failures on some platforms. + + 2.9.1 (2012-06-01) + ------------------ + + Bugfixes: + + - Fixed issue #103. The ``tal:on-error`` statement now always adds an + explicit end-tag to the element, even with a substitution content of + nothing. + + - Fixed issue #113. The ``tal:on-error`` statement now works correctly + also for dynamic attributes. That is, the fallback tag now includes + only static attributes. + + - Fixed name error which prevented the benchmark from running + correctly. + + Compatibility: + + - Fixed deprecation warning on Python 3 for zope interface implements + declaration. This fixes issue #116. + + 2.9.0 (2012-05-31) + ------------------ + + Features: + + - The translation function now gets the ``econtext`` argument as the + value for ``context``. Note that historically, this was usually an + HTTP request which might provide language negotiation data through a + dictionary interface. + [alvinyue] + + Bugfixes: + + - Fixed import alias issue which would lead to a syntax error in + generated Python code. Fixes issue #114. + + 2.8.5 (2012-05-02) + ------------------ + + Bugfixes: + + - Fixed minor installation issues on Python 2.5 and 3. + [ppaez] + + - Ensure output is unicode even when trivial (an empty string). + + 2.8.4 (2012-04-18) + ------------------ + + Features: + + - In exception output, long filenames are now truncated to 60 + characters of output, preventing line wrap which makes it difficult + to scan the exception output. + + Bugfixes: + + - Include filename and location in exception output for exceptions + raised during compilation. + + - If a trivial translation substitution variable is given (i.e. an + empty string), simply ignore it. This fixes issue #106. + + 2.8.3 (2012-04-16) + ------------------ + + Features: + + - Log template source on debug-level before cooking. + + - The `target_language` argument, if given, is now available as a + variable in templates. + + 2.8.2 (2012-03-30) + ------------------ + + Features: + + - Temporary caches used in debug mode are cleaned up eagerly, rather + than waiting for process termination. + [mitchellrj] + + Bugfixes: + + - The `index`, `start` and `end` methods on the TAL repeat object are + now callable. This fixes an incompatibility with ZPT. + + - The loader now correctly handles absolute paths on Windows. + [rdale] + + 2.8.1 (2012-03-29) + ------------------ + + Features: + + - The exception formatter now lists errors in 'wrapping order'. 
This + means that the innermost, and presumably most relevant exception is + shown last. + + Bugfixes: + + - The exception formatter now correctly recognizes nested errors and + does not rewrap the dynamically generated exception class. + + - The exception formatter now correctly sets the ``__module__`` + attribute to that of the original exception class. + + 2.8.0 (2012-02-29) + ------------------ + + Features: + + - Added support for code blocks using the `<?python ... ?>` processing + instruction syntax. + + The scope is name assignments is up until the nearest macro + definition, or the template itself if macros are not used. + + Bugfixes: + + - Fall back to the exception class' ``__new__`` method to safely + create an exception object that is not implemented in Python. + + - The exception formatter now keeps track of already formatted + exceptions, and ignores them from further output. + + 2.7.4 (2012-02-27) + ------------------ + + - The error handler now invokes the ``__init__`` method of + ``BaseException`` instead of the possibly overriden method (which + may take required arguments). This fixes issue #97. + [j23d, malthe] + + 2.7.3 (2012-01-16) + ------------------ + + Bugfixes: + + - The trim whitespace option now correctly trims actual whitespace to + a single character, appearing either to the left or to the right of + an element prefix or suffix string. + + 2.7.2 (2012-01-08) + ------------------ + + Features: + + - Added option ``trim_attribute_space`` that decides whether attribute + whitespace is stripped (at most down to a single space). This option + exists to provide compatibility with the reference + implementation. Fixes issue #85. + + Bugfixes: + + - Ignore unhashable builtins when generating a reverse builtin + map to quickly look up a builtin value. + [malthe] + + - Apply translation mapping even when a translation function is not + available. This fixes issue #83. + [malthe] + + - Fixed issue #80. The translation domain for a slot is defined by the + source document, i.e. the template providing the content for a slot + whether it be the default or provided through ``metal:fill-slot``. + [jcbrand] + + - In certain circumstances, a Unicode non-breaking space character would cause + a define clause to fail to parse. + + 2.7.1 (2011-12-29) + ------------------ + + Features: + + - Enable expression interpolation in CDATA. + + - The page template class now implements dictionary access to macros:: + + template[name] + + This is a short-hand for:: + + template.macros[name] + + Bugfixes: + + - An invalid define clause would be silently ignored; we now raise a + language error exception. This fixes issue #79. + + - Fixed regression where ``${...}`` interpolation expressions could + not span multiple lines. This fixes issue #77. + + 2.7.0 (2011-12-13) + ------------------ + + Features: + + - The ``load:`` expression now derives from the string expression such + that the ``${...}`` operator can be used for expression + interpolation. + + - The ``load:`` expression now accepts asset specs; these are resolved + by the ``pkg_resources.resource_filename`` function:: + + <package_name>:<path> + + An example from the test suite:: + + chameleon:tests/inputs/hello_world.pt + + Bugfixes: + + - If an attribute name for translation was not a valid Python + identifier, the compiler would generate invalid code. This has been + fixed, and the compiler now also throws an exception if an attribute + specification contains a comma. 
(Note that the only valid separator + character is the semicolon, when specifying attributes for + translation via the ``i18n:translate`` statement). This addresses + issue #76. + + 2.6.2 (2011-12-08) + ------------------ + + Bugfixes: + + - Fixed issue where ``tal:on-error`` would not respect + ``tal:omit-tag`` or namespace elements which are omitted by default + (such as ``<tal:block />``). + + - Fixed issue where ``macros`` attribute would not be available on + file-based templates due to incorrect initialization. + + - The ``TryExcept`` and ``TryFinally`` AST nodes are not available on + Python 3.3. These have been aliased to ``Try``. This fixes issue + #75. + + Features: + + - The TAL repeat item now makes a security declaration that grants + access to unprotected subobjects on the Zope 2 platform:: + + __allow_access_to_unprotected_subobjects__ = True + + This is required for legacy compatibility and does not affect other + environments. + + - The template object now has a method ``write(body)`` which + explicitly decodes and cooks a string input. + + - Added configuration option ``loader_class`` which sets the class + used to create the template loader object. + + The class (essentially a callable) is created at template + construction time. + + 2.6.1 (2011-11-30) + ------------------ + + Bugfixes: + + - Decode HTML entities in expression interpolation strings. This fixes + issue #74. + + - Allow ``xml`` and ``xmlns`` attributes on TAL, I18N and METAL + namespace elements. This fixes issue #73. + + 2.6.0 (2011-11-24) + ------------------ + + Features: + + - Added support for implicit translation: + + The ``implicit_i18n_translate`` option enables implicit translation + of text. The ``implicit_i18n_attributes`` enables implicit + translation of attributes. The latter must be a set and for an + attribute to be implicitly translated, its lowercase string value + must be included in the set. + + - Added option ``strict`` (enabled by default) which decides whether + expressions are required to be valid at compile time. That is, if + not set, an exception is only raised for an invalid expression at + evaluation time. + + - An expression error now results in an exception only if the + expression is attempted evaluated during a rendering. + + - Added a configuration option ``prepend_relative_search_path`` which + decides whether the path relative to a file-based template is + prepended to the load search path. The default is ``True``. + + - Added a configuration option ``search_path`` to the file-based + template class, which adds additional paths to the template load + instance bound to the ``load:`` expression. The option takes a + string path or an iterable yielding string paths. The default value + is the empty set. + + Bugfixes: + + - Exception instances now support pickle/unpickle. + + - An attributes in i18n:attributes no longer needs to match an + existing or dynamic attribute in order to appear in the + element. This fixes issue #66. + + 2.5.3 (2011-10-23) + ------------------ + + Bugfixes: + + - Fixed an issue where a nested macro slot definition would fail even + though there existed a parent macro definition. This fixes issue + #69. + + 2.5.2 (2011-10-12) + ------------------ + + Bugfixes: + + - Fixed an issue where technically invalid input would result in a + compiler error. + + Features: + + - The markup class now inherits from the unicode string type such that + it's compatible with the string interface. 
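For the implicit translation options added in 2.6.0, a minimal sketch of how they might be enabled, assuming both options are accepted as keyword arguments by the ``PageTemplate`` constructor (they are described above only as "options")::

    from chameleon import PageTemplate

    # Text content is translated implicitly; attributes are translated only
    # if their lowercase name is in the ``implicit_i18n_attributes`` set.
    template = PageTemplate(
        '<img alt="Company logo" src="logo.png" />',
        implicit_i18n_translate=True,
        implicit_i18n_attributes={"alt"},
    )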
+ + 2.5.1 (2011-09-29) + ------------------ + + Bugfixes: + + - The symbol names "convert", "decode" and "translate" are now no + longer set as read-only *compiler internals*. This fixes issue #65. + + - Fixed an issue where a macro extension chain nested two levels (a + template uses a macro that extends a macro) would lose the middle + slot definitions if slots were defined nested. + + The compiler now throws an error if a nested slot definition is used + outside a macro extension context. + + 2.5.0 (2011-09-23) + ------------------ + + Features: + + - An expression type ``structure:`` is now available which wraps the + expression result as *structure* such that it is not escaped on + insertion, e.g.:: + + <div id="content"> + ${structure: context.body} + </div> + + This also means that the ``structure`` keyword for ``tal:content`` + and ``tal:replace`` now has an alternative spelling via the + expression type ``structure:``. + + - The string-based template constructor now accepts encoded input. + + 2.4.6 (2011-09-23) + ------------------ + + Bugfixes: + + - The ``tal:on-error`` statement should catch all exceptions. + + - Fixed issue that would prevent escaping of interpolation expression + values appearing in text. + + 2.4.5 (2011-09-21) + ------------------ + + Bugfixes: + + - The ``tal:on-error`` handler should have a ``error`` variable + defined that has the value of the exception thrown. + + - The ``tal:on-error`` statement is a substitution statement and + should support the "text" and "structure" insertion methods. + + 2.4.4 (2011-09-15) + ------------------ + + Bugfixes: + + - An encoding specified in the XML document preamble is now read and + used to decode the template input to unicode. This fixes issue #55. + + - Encoded expression input on Python 3 is now correctly + decoded. Previously, the string representation output would be + included instead of an actually decoded string. + + - Expression result conversion steps are now correctly included in + error handling such that the exception output points to the + expression location. + + 2.4.3 (2011-09-13) + ------------------ + + Features: + + - When an encoding is provided, pass the 'ignore' flag to avoid + decoding issues with bad input. + + Bugfixes: + + - Fixed pypy compatibility issue (introduced in previous release). + + 2.4.2 (2011-09-13) + ------------------ + + Bugfixes: + + - Fixed an issue in the compiler where an internal variable (such as a + translation default value) would be cached, resulting in variable + scope corruption (see issue #49). + + 2.4.1 (2011-09-08) + ------------------ + + Bugfixes: + + - Fixed an issue where a default value for an attribute would + sometimes spill over into another attribute. + + - Fixed issue where the use of the ``default`` name in an attribute + interpolation expression would print the attribute value. This is + unexpected, because it's an expression, not a static text suitable + for output. An attribute value of ``default`` now correctly drops + the attribute. + + 2.4.0 (2011-08-22) + ------------------ + + Features: + + - Added an option ``boolean_attributes`` to evaluate and render a + provided set of attributes using a boolean logic: if the attribute + is a true value, the value will be the attribute name, otherwise the + attribute is dropped. 
+ + In the reference implementation, the following attributes are + configured as boolean values when the template is rendered in + HTML-mode:: + + "compact", "nowrap", "ismap", "declare", "noshade", + "checked", "disabled", "readonly", "multiple", "selected", + "noresize", "defer" + + Note that in Chameleon, these attributes must be manually provided. + + Bugfixes: + + - The carriage return character (used on Windows platforms) would + incorrectly be included in Python comments. + + It is now replaced with a line break. + + This fixes issue #44. + + 2.3.8 (2011-08-19) + ------------------ + + - Fixed import error that affected Python 2.5 only. + + 2.3.7 (2011-08-19) + ------------------ + + Features: + + - Added an option ``literal_false`` that disables the default behavior + of dropping an attribute for a value of ``False`` (in addition to + ``None``). This modified behavior is the behavior exhibited in + reference implementation. + + Bugfixes: + + - Undo attribute special HTML attribute behavior (see previous + release). + + This turned out not to be a compatible behavior; rather, boolean + values should simply be coerced to a string. + + Meanwhile, the reference implementation does support an HTML mode in + which the special attribute behavior is exhibited. + + We do not currently support this mode. + + 2.3.6 (2011-08-18) + ------------------ + + Features: + + - Certain HTML attribute names now have a special behavior for a + attribute value of ``True`` (or ``default`` if no default is + defined). For these attributes, this return value will result in the + name being printed as the value:: + + <input type="input" tal:attributes="checked True" /> + + will be rendered as:: + + <input type="input" checked="checked" /> + + This behavior is compatible with the reference implementation. + + 2.3.5 (2011-08-18) + ------------------ + + Features: + + - Added support for the set operator (``{item, item, ...}``). + + Bugfixes: + + - If macro is defined on the same element as a translation name, this + no longer results in a "translation name not allowed outside + translation" error. This fixes issue #43. + + - Attribute fallback to dictionary lookup now works on multiple items + (e.g. ``d1.d2.d2``). This fixes issue #42. + + 2.3.4 (2011-08-16) + ------------------ + + Features: + + - When inserting content in either attributes or text, a value of + ``True`` (like ``False`` and ``None``) will result in no + action. + + - Use statically assigned variables for ``"attrs"`` and + ``"default"``. This change yields a performance improvement of + 15-20%. + + - The template loader class now accepts an optional argument + ``default_extension`` which accepts a filename extension which will + be appended to the filename if there's not already an extension. + + Bugfixes: + + - The default symbol is now ``True`` for an attribute if the attribute + default is not provided. Note that the result is that the attribute + is dropped. This fixes issue #41. + + - Fixed an issue where assignment to a variable ``"type"`` would + fail. This fixes issue #40. + + - Fixed an issue where an (unsuccesful) assignment for a repeat loop + to a compiler internal name would not result in an error. + + - If the translation function returns the identical object, manually + coerce it to string. This fixes a compatibility issue with + translation functions which do not convert non-string objects to a + string value, but simply return them unchanged. 
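To make the ``boolean_attributes`` behaviour described under 2.4.0 concrete, a minimal sketch, assuming the option is accepted as a keyword argument by the ``PageTemplate`` constructor and that rendering follows the boolean logic described above (the output shown in comments is the expected shape, not captured output)::

    from chameleon import PageTemplate

    template = PageTemplate(
        '<input xmlns:tal="http://xml.zope.org/namespaces/tal"'
        '       type="checkbox" tal:attributes="checked checked" />',
        boolean_attributes={"checked"},
    )

    # A true value renders the attribute as its own name; a false value
    # drops the attribute entirely.
    print(template(checked=True))   # e.g. <input type="checkbox" checked="checked" />
    print(template(checked=False))  # e.g. <input type="checkbox" />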
+ + 2.3.3 (2011-08-15) + ------------------ + + Features: + + - The ``load:`` expression now passes the initial keyword arguments to + its template loader (e.g. ``auto_reload`` and ``encoding``). + + - In the exception output, string variable values are now limited to a + limited output of characters, single line only. + + Bugfixes: + + - Fixed horizontal alignment of exception location info + (i.e. 'String:', 'Filename:' and 'Location:') such that they match + the template exception formatter. + + 2.3.2 (2011-08-11) + ------------------ + + Bugfixes: + + - Fixed issue where i18n:domain would not be inherited through macros + and slots. This fixes issue #37. + + 2.3.1 (2011-08-11) + ------------------ + + Features: + + - The ``Builtin`` node type may now be used to represent any Python + local or global name. This allows expression compilers to refer to + e.g. ``get`` or ``getitem``, or to explicit require a builtin object + such as one from the ``extra_builtins`` dictionary. + + Bugfixes: + + - Builtins which are not explicitly disallowed may now be redefined + and used as variables (e.g. ``nothing``). + + - Fixed compiler issue with circular node annotation loop. + + 2.3 (2011-08-10) + ---------------- + + Features: + + - Added support for the following syntax to disable inline evaluation + in a comment: + + <!--? comment appears verbatim (no ${...} evaluation) --> + + Note that the initial question mark character (?) will be omitted + from output. + + - The parser now accepts '<' and '>' in attributes. Note that this is + invalid markup. Previously, the '<' would not be accepted as a valid + attribute value, but this would result in an 'unexpected end tag' + error elsewhere. This fixes issue #38. + + - The expression compiler now provides methods ``assign_text`` and + ``assign_value`` such that a template engine might configure this + value conversion to support e.g. encoded strings. + + Note that currently, the only client for the ``assign_text`` method + is the string expression type. + + - Enable template loader for string-based template classes. Note that + the ``filename`` keyword argument may be provided on initialization + to identify the template source by filename. This fixes issue #36. + + - Added ``extra_builtins`` option to the page template class. These + builtins are added to the default builtins dictionary at cook time + and may be provided at initialization using the ``extra_builtins`` + keyword argument. + + Bugfixes: + + - If a translation domain is set for a fill slot, use this setting + instead of the macro template domain. + + - The Python expression compiler now correctly decodes HTML entities + ``'gt'`` and ``'lt'``. This fixes issue #32. + + - The string expression compiler now correctly handles encoded text + (when support for encoded strings is enabled). This fixes issue #35. + + - Fixed an issue where setting the ``filename`` attribute on a + file-based template would not automatically cause an invalidation. + + - Exceptions raised by Chameleon can now be copied via + ``copy.copy``. This fixes issue #36. + [leorochael] + + - If copying the exception fails in the exception handler, simply + re-raise the original exception and log a warning. + + 2.2 (2011-07-28) + ---------------- + + Features: + + - Added new expression type ``load:`` that allows loading a + template. Both relative and absolute paths are supported. If the + path given is relative, then it will be resolved with respect to the + directory of the template. 
+ + - Added support for dynamic evaluation of expressions. + + Note that this is to support legacy applications. It is not + currently wired into the provided template classes. + + - Template classes now have a ``builtins`` attribute which may be used + to define built-in variables always available in the template + variable scope. + + Incompatibilities: + + - The file-based template class no longer accepts a parameter + ``loader``. This parameter would be used to load a template from a + relative path, using a ``find(filename)`` method. This was however, + undocumented, and probably not very useful since we have the + ``TemplateLoader`` mechanism already. + + - The compiled template module now contains an ``initialize`` function + which takes values that map to the template builtins. The return + value of this function is a dictionary that contains the render + functions. + + Bugfixes: + + - The file-based template class no longer verifies the existance of a + template file (using ``os.lstat``). This now happens implicitly if + eager parsing is enabled, or otherwise when first needed (e.g. at + render time). + + This is classified as a bug fix because the previous behavior was + probably not what you'd expect, especially if an application + initializes a lot of templates without needing to render them + immediately. + + 2.1.1 (2011-07-28) + ------------------ + + Features: + + - Improved exception display. The expression string is now shown in + the context of the original source (if available) with a marker + string indicating the location of the expression in the template + source. + + Bugfixes: + + - The ``structure`` insertion mode now correctly decodes entities for + any expression type (including ``string:``). This fixes issue #30. + + - Don't show internal variables in the exception formatter variable + listing. + + 2.1 (2011-07-25) + ---------------- + + Features: + + - Expression interpolation (using the ``${...}`` operator and + previously also ``$identifier``) now requires braces everywhere + except inside the ``string:`` expression type. + + This change is motivated by a number of legacy templates in which + the interpolation format without braces ``$identifier`` appears as + text. + + 2.0.2 (2011-07-25) + ------------------ + + Bugfixes: + + - Don't use dynamic variable scope for lambda-scoped variables (#27). + + - Avoid duplication of exception class and message in traceback. + + - Fixed issue where a ``metal:fill-slot`` would be ignored if a macro + was set to be used on the same element (#16). + + 2.0.1 (2011-07-23) + ------------------ + + Bugfixes: + + - Fixed issue where global variable definition from macro slots would + fail (they would instead be local). This also affects error + reporting from inside slots because this would be recorded + internally as a global. + + - Fixed issue with template cache digest (used for filenames); modules + are now invalidated whenever any changes are made to the + distribution set available (packages on ``sys.path``). + + - Fixed exception handler to better let exceptions propagate through + the renderer. + + - The disk-based module compiler now mangles template source filenames + such that the output Python module is valid and at root level (dots + and hyphens are replaced by an underscore). This fixes issue #17. + + - Fixed translations (i18n) on Python 2.5. + + 2.0 (2011-07-14) + ---------------- + + - Point release. 
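A minimal sketch of the interpolation rule introduced in 2.1: braces are required, so ``${...}`` is evaluated while a bare ``$identifier`` outside the ``string:`` expression type is passed through as text::

    from chameleon import PageTemplate

    template = PageTemplate("<p>Hello, ${name}! A bare $name stays literal.</p>")
    print(template(name="World"))
    # Expected under the 2.1 rule:
    # <p>Hello, World! A bare $name stays literal.</p>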
+ + 2.0-rc14 (2011-07-13) + --------------------- + + Bugfixes: + + - The tab character (``\t``) is now parsed correctly when used inside + tags. + + Features: + + - The ``RepeatDict`` class now works as a proxy behind a seperate + dictionary instance. + + - Added template constructor option ``keep_body`` which is a flag + (also available as a class attribute) that controls whether to save + the template body input in the ``body`` attribute. + + This is disabled by default, unless debug-mode is enabled. + + - The page template loader class now accepts an optional ``formats`` + argument which can be used to select an alternative template class. + + 2.0-rc13 (2011-07-07) + --------------------- + + Bugfixes: + + - The backslash character (followed by optional whitespace and a line + break) was not correctly interpreted as a continuation for Python + expressions. + + Features: + + - The Python expression implementation is now more flexible for + external subclassing via a new ``parse`` method. + + 2.0-rc12 (2011-07-04) + --------------------- + + Bugfixes: + + - Initial keyword arguments passed to a template now no longer "leak" + into the template variable space after a macro call. + + - An unexpected end tag is now an unrecoverable error. + + Features: + + - Improve exception output. + + 2.0-rc11 (2011-05-26) + --------------------- + + Bugfixes: + + - Fixed issue where variable names that begin with an underscore were + seemingly allowed, but their use resulted in a compiler error. + + Features: + + - Template variable names are now allowed to be prefixed with a single + underscore, but not two or more (reserved for internal use). + + Examples of valid names:: + + item + ITEM + _item + camelCase + underscore_delimited + help + + - Added support for Genshi's comment "drop" syntax:: + + <!--! This comment will be dropped --> + + Note the additional exclamation (!) character. + + This fixes addresses issue #10. + + 2.0-rc10 (2011-05-24) + --------------------- + + Bugfixes: + + - The ``tal:attributes`` statement now correctly operates + case-insensitive. The attribute name given in the statement will + replace an existing attribute with the same name, without respect to + case. + + Features: + + - Added ``meta:interpolation`` statement to control expression + interpolation setting. + + Strings that disable the setting: ``"off"`` and ``"false"``. + Strings that enable the setting: ``"on"`` and ``"true"``. + + - Expression interpolation now works inside XML comments. + + 2.0-rc9 (2011-05-05) + -------------------- + + Features: + + - Better debugging support for string decode and conversion. If a + naive join fails, each element in the output will now be attempted + coerced to unicode to try and trigger the failure near to the bad + string. + + 2.0-rc8 (2011-04-11) + -------------------- + + Bugfixes: + + - If a macro defines two slots with the same name, a caller will now + fill both with a single usage. + + - If a valid of ``None`` is provided as the translation function + argument, we now fall back to the class default. + + 2.0-rc7 (2011-03-29) + -------------------- + + Bugfixes: + + - Fixed issue with Python 2.5 compatibility AST. This affected at + least PyPy 1.4. + + Features: + + - The ``auto_reload`` setting now defaults to the class value; the + base template class gives a default value of + ``chameleon.config.AUTO_RELOAD``. This change allows a subclass to + provide a custom default value (such as an application-specific + debug mode setting). 
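The 2.0-rc7 change to ``auto_reload`` can be pictured with a minimal sketch, assuming a subclass may override the class-level default; ``APP_DEBUG`` and the template filename are hypothetical::

    from chameleon import PageTemplateFile

    APP_DEBUG = True  # hypothetical application-specific debug flag

    class AppPageTemplateFile(PageTemplateFile):
        # The base class default is chameleon.config.AUTO_RELOAD; a subclass
        # supplies its own class-level default here.
        auto_reload = APP_DEBUG

    template = AppPageTemplateFile("hello_world.pt")  # hypothetical filename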
+ + + 2.0-rc6 (2011-03-19) + -------------------- + + Features: + + - Added support for ``target_language`` keyword argument to render + method. If provided, the argument will be curried onto the + translation function. + + Bugfixes: + + - The HTML entities 'lt', 'gt' and 'quot' appearing inside content + subtition expressions are now translated into their native character + values. This fixes an issue where you could not dynamically create + elements using the ``structure`` (which is possible in ZPT). The + need to create such structure stems from the lack of an expression + interpolation operator in ZPT. + + - Fixed duplicate file pointer issue with test suite (affected Windows + platforms only). This fixes issue #9. + [oliora] + + - Use already open file using ``os.fdopen`` when trying to write out + the module source. This fixes LP #731803. + + + 2.0-rc5 (2011-03-07) + -------------------- + + Bugfixes: + + - Fixed a number of issues concerning the escaping of attribute + values: + + 1) Static attribute values are now included as they appear in the + source. + + This means that invalid attribute values such as ``"true && + false"`` are now left alone. It's not the job of the template + engine to correct such markup, at least not in the default mode + of operation. + + 2) The string expression compiler no longer unescapes + values. Instead, this is left to each expression + compiler. Currently only the Python expression compiler unescapes + its input. + + 3) The dynamic escape code sequence now correctly only replaces + ampersands that are part of an HTML escape format. + + Imports: + + - The page template classes and the loader class can now be imported + directly from the ``chameleon`` module. + + Features: + + - If a custom template loader is not provided, relative paths are now + resolved using ``os.abspath`` (i.e. to the current working + directory). + + - Absolute paths are normalized using ``os.path.normpath`` and + ``os.path.expanduser``. This ensures that all paths are kept in + their "canonical" form. + + + 2.0-rc4 (2011-03-03) + -------------------- + + Bugfixes: + + - Fixed an issue where the output of an end-to-end string expression + would raise an exception if the expression evaluated to ``None`` (it + should simply output nothing). + + - The ``convert`` function (which is configurable on the template + class level) now defaults to the ``translate`` function (at + run-time). + + This fixes an issue where message objects were not translated (and + thus converted to a string) using the a provided ``translate`` + function. + + - Fixed string interpolation issue where an expression immediately + succeeded by a right curly bracket would not parse. + + This fixes issue #5. + + - Fixed error where ``tal:condition`` would be evaluated after + ``tal:repeat``. + + Features: + + - Python expression is now a TALES expression. That means that the + pipe operator can be used to chain two or more expressions in a + try-except sequence. + + This behavior was ported from the 1.x series. Note that while it's + still possible to use the pipe character ("|") in an expression, it + must now be escaped. + + - The template cache can now be shared by multiple processes. + + + 2.0-rc3 (2011-03-02) + -------------------- + + Bugfixes: + + - Fixed ``atexit`` handler. + + This fixes issue #3. + + - If a cache directory is specified, it will now be used even when not + in debug mode. + + - Allow "comment" attribute in the TAL namespace. 
+ + This fixes an issue in the sense that the reference engine allows + any attribute within the TAL namespace. However, only "comment" is + in common use. + + - The template constructor now accepts a flag ``debug`` which puts the + template *instance* into debug-mode regardless of the global + setting. + + This fixes issue #1. + + Features: + + - Added exception handler for exceptions raised while evaluating an + expression. + + This handler raises (or attempts to) a new exception of the type + ``RenderError``, with an additional base class of the original + exception class. The string value of the exception is a formatted + error message which includes the expression that caused the + exception. + + If we are unable to create the exception class, the original + exception is re-raised. + + 2.0-rc2 (2011-02-28) + -------------------- + + - Fixed upload issue. + + 2.0-rc1 (2011-02-28) + -------------------- + + - Initial public release. See documentation for what's new in this + series. + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 diff --git a/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/SOURCES.txt b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/SOURCES.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/SOURCES.txt @@ -0,0 +1,380 @@ +.gitignore +CHANGES.rst +COPYRIGHT.txt +LICENSE.txt +Makefile +README.rst +distribute_setup.py +setup.cfg +setup.py +tox.ini +benchmarks/bm_chameleon.py +benchmarks/bm_mako.py +benchmarks/util.py +docs/conf.py +docs/configuration.rst +docs/index.rst +docs/integration.rst +docs/library.rst +docs/reference.rst +src/Chameleon.egg-info/PKG-INFO +src/Chameleon.egg-info/SOURCES.txt +src/Chameleon.egg-info/dependency_links.txt +src/Chameleon.egg-info/not-zip-safe +src/Chameleon.egg-info/top_level.txt +src/chameleon/__init__.py +src/chameleon/ast24.py +src/chameleon/astutil.py +src/chameleon/benchmark.py +src/chameleon/codegen.py +src/chameleon/compiler.py +src/chameleon/config.py +src/chameleon/exc.py +src/chameleon/i18n.py +src/chameleon/interfaces.py +src/chameleon/loader.py +src/chameleon/metal.py +src/chameleon/namespaces.py +src/chameleon/nodes.py +src/chameleon/parser.py +src/chameleon/program.py +src/chameleon/py25.py +src/chameleon/py26.py +src/chameleon/tal.py +src/chameleon/tales.py +src/chameleon/template.py +src/chameleon/tokenize.py +src/chameleon/utils.py +src/chameleon/tests/__init__.py +src/chameleon/tests/test_doctests.py +src/chameleon/tests/test_loader.py +src/chameleon/tests/test_parser.py +src/chameleon/tests/test_sniffing.py +src/chameleon/tests/test_templates.py +src/chameleon/tests/test_tokenizer.py +src/chameleon/tests/inputs/001-interpolation.txt +src/chameleon/tests/inputs/001-variable-scope.html +src/chameleon/tests/inputs/001-variable-scope.pt +src/chameleon/tests/inputs/001.xml +src/chameleon/tests/inputs/002-repeat-scope.pt +src/chameleon/tests/inputs/002.xml +src/chameleon/tests/inputs/003-content.pt +src/chameleon/tests/inputs/003.xml +src/chameleon/tests/inputs/004-attributes.pt +src/chameleon/tests/inputs/004.xml 
+src/chameleon/tests/inputs/005-default.pt +src/chameleon/tests/inputs/005.xml +src/chameleon/tests/inputs/006-attribute-interpolation.pt +src/chameleon/tests/inputs/006.xml +src/chameleon/tests/inputs/007-content-interpolation.pt +src/chameleon/tests/inputs/007.xml +src/chameleon/tests/inputs/008-builtins.pt +src/chameleon/tests/inputs/008.xml +src/chameleon/tests/inputs/009-literals.pt +src/chameleon/tests/inputs/009.xml +src/chameleon/tests/inputs/010-structure.pt +src/chameleon/tests/inputs/010.xml +src/chameleon/tests/inputs/011-messages.pt +src/chameleon/tests/inputs/011.xml +src/chameleon/tests/inputs/012-translation.pt +src/chameleon/tests/inputs/012.xml +src/chameleon/tests/inputs/013-repeat-nested.pt +src/chameleon/tests/inputs/013.xml +src/chameleon/tests/inputs/014-repeat-nested-similar.pt +src/chameleon/tests/inputs/014.xml +src/chameleon/tests/inputs/015-translation-nested.pt +src/chameleon/tests/inputs/015.xml +src/chameleon/tests/inputs/016-explicit-translation.pt +src/chameleon/tests/inputs/016.xml +src/chameleon/tests/inputs/017-omit-tag.pt +src/chameleon/tests/inputs/017.xml +src/chameleon/tests/inputs/018-translation-nested-dynamic.pt +src/chameleon/tests/inputs/018.xml +src/chameleon/tests/inputs/019-replace.pt +src/chameleon/tests/inputs/019.xml +src/chameleon/tests/inputs/020-on-error.pt +src/chameleon/tests/inputs/020.xml +src/chameleon/tests/inputs/021-translation-domain.pt +src/chameleon/tests/inputs/021.xml +src/chameleon/tests/inputs/022-switch.pt +src/chameleon/tests/inputs/022.xml +src/chameleon/tests/inputs/023-condition.pt +src/chameleon/tests/inputs/023.xml +src/chameleon/tests/inputs/024-namespace-elements.pt +src/chameleon/tests/inputs/024.xml +src/chameleon/tests/inputs/025-repeat-whitespace.pt +src/chameleon/tests/inputs/025.xml +src/chameleon/tests/inputs/026-repeat-variable.pt +src/chameleon/tests/inputs/026.xml +src/chameleon/tests/inputs/027-attribute-replacement.pt +src/chameleon/tests/inputs/027.xml +src/chameleon/tests/inputs/028-attribute-toggle.pt +src/chameleon/tests/inputs/028.xml +src/chameleon/tests/inputs/029-attribute-ordering.pt +src/chameleon/tests/inputs/029.xml +src/chameleon/tests/inputs/030-repeat-tuples.pt +src/chameleon/tests/inputs/030.xml +src/chameleon/tests/inputs/031-namespace-with-tal.pt +src/chameleon/tests/inputs/031.xml +src/chameleon/tests/inputs/032-master-template.pt +src/chameleon/tests/inputs/032.xml +src/chameleon/tests/inputs/033-use-macro-trivial.pt +src/chameleon/tests/inputs/033.xml +src/chameleon/tests/inputs/034-use-template-as-macro.pt +src/chameleon/tests/inputs/034.xml +src/chameleon/tests/inputs/035-use-macro-with-fill-slot.pt +src/chameleon/tests/inputs/035.xml +src/chameleon/tests/inputs/036-use-macro-inherits-dynamic-scope.pt +src/chameleon/tests/inputs/036.xml +src/chameleon/tests/inputs/037-use-macro-local-variable-scope.pt +src/chameleon/tests/inputs/037.xml +src/chameleon/tests/inputs/038-use-macro-globals.pt +src/chameleon/tests/inputs/038.xml +src/chameleon/tests/inputs/039-globals.pt +src/chameleon/tests/inputs/039.xml +src/chameleon/tests/inputs/040-macro-using-template-symbol.pt +src/chameleon/tests/inputs/040.xml +src/chameleon/tests/inputs/041-translate-nested-names.pt +src/chameleon/tests/inputs/041.xml +src/chameleon/tests/inputs/042-use-macro-fill-footer.pt +src/chameleon/tests/inputs/042.xml +src/chameleon/tests/inputs/043-macro-nested-dynamic-vars.pt +src/chameleon/tests/inputs/043.xml +src/chameleon/tests/inputs/044-tuple-define.pt +src/chameleon/tests/inputs/044.xml 
+src/chameleon/tests/inputs/045-namespaces.pt +src/chameleon/tests/inputs/045.xml +src/chameleon/tests/inputs/046-extend-macro.pt +src/chameleon/tests/inputs/046.xml +src/chameleon/tests/inputs/047-use-extended-macro.pt +src/chameleon/tests/inputs/047.xml +src/chameleon/tests/inputs/048-use-extended-macro-fill-original.pt +src/chameleon/tests/inputs/048.xml +src/chameleon/tests/inputs/049-entities-in-attributes.pt +src/chameleon/tests/inputs/049.xml +src/chameleon/tests/inputs/050-define-macro-and-use-not-extend.pt +src/chameleon/tests/inputs/050.xml +src/chameleon/tests/inputs/051-use-non-extended-macro.pt +src/chameleon/tests/inputs/051.xml +src/chameleon/tests/inputs/052-i18n-domain-inside-filled-slot.pt +src/chameleon/tests/inputs/052.xml +src/chameleon/tests/inputs/053-special-characters-in-attributes.pt +src/chameleon/tests/inputs/053.xml +src/chameleon/tests/inputs/054-import-expression.pt +src/chameleon/tests/inputs/054.xml +src/chameleon/tests/inputs/055-attribute-fallback-to-dict-lookup.pt +src/chameleon/tests/inputs/055.xml +src/chameleon/tests/inputs/056-comment-attribute.pt +src/chameleon/tests/inputs/056.xml +src/chameleon/tests/inputs/057-order.pt +src/chameleon/tests/inputs/057.xml +src/chameleon/tests/inputs/058-script.pt +src/chameleon/tests/inputs/058.xml +src/chameleon/tests/inputs/059-embedded-javascript.pt +src/chameleon/tests/inputs/059.xml +src/chameleon/tests/inputs/060-macro-with-multiple-same-slots.pt +src/chameleon/tests/inputs/060.xml +src/chameleon/tests/inputs/061-fill-one-slot-but-two-defined.pt +src/chameleon/tests/inputs/061.xml +src/chameleon/tests/inputs/062-comments-and-expressions.pt +src/chameleon/tests/inputs/062.xml +src/chameleon/tests/inputs/063-continuation.pt +src/chameleon/tests/inputs/063.xml +src/chameleon/tests/inputs/064-tags-and-special-characters.pt +src/chameleon/tests/inputs/064.xml +src/chameleon/tests/inputs/065-use-macro-in-fill.pt +src/chameleon/tests/inputs/065.xml +src/chameleon/tests/inputs/066-load-expression.pt +src/chameleon/tests/inputs/066.xml +src/chameleon/tests/inputs/067-attribute-decode.pt +src/chameleon/tests/inputs/067.xml +src/chameleon/tests/inputs/068-less-than-greater-than-in-attributes.pt +src/chameleon/tests/inputs/068.xml +src/chameleon/tests/inputs/069-translation-domain-and-macro.pt +src/chameleon/tests/inputs/069.xml +src/chameleon/tests/inputs/070-translation-domain-and-use-macro.pt +src/chameleon/tests/inputs/070.xml +src/chameleon/tests/inputs/071-html-attribute-defaults.pt +src/chameleon/tests/inputs/071.xml +src/chameleon/tests/inputs/072-repeat-interpolation.pt +src/chameleon/tests/inputs/072.xml +src/chameleon/tests/inputs/073-utf8-encoded.pt +src/chameleon/tests/inputs/073.xml +src/chameleon/tests/inputs/074-encoded-template.pt +src/chameleon/tests/inputs/074.xml +src/chameleon/tests/inputs/075-nested-macros.pt +src/chameleon/tests/inputs/075.xml +src/chameleon/tests/inputs/076-nested-macro-override.pt +src/chameleon/tests/inputs/076.xml +src/chameleon/tests/inputs/077-i18n-attributes.pt +src/chameleon/tests/inputs/077.xml +src/chameleon/tests/inputs/078-tags-and-newlines.pt +src/chameleon/tests/inputs/078.xml +src/chameleon/tests/inputs/079-implicit-i18n.pt +src/chameleon/tests/inputs/079.xml +src/chameleon/tests/inputs/080-xmlns-namespace-on-tal.pt +src/chameleon/tests/inputs/080.xml +src/chameleon/tests/inputs/081-load-spec.pt +src/chameleon/tests/inputs/081.xml +src/chameleon/tests/inputs/082-load-spec-computed.pt +src/chameleon/tests/inputs/082.xml 
+src/chameleon/tests/inputs/083-template-dict-to-macro.pt +src/chameleon/tests/inputs/083.xml +src/chameleon/tests/inputs/084-interpolation-in-cdata.pt +src/chameleon/tests/inputs/084.xml +src/chameleon/tests/inputs/085-nested-translation.pt +src/chameleon/tests/inputs/085.xml +src/chameleon/tests/inputs/086-self-closing.pt +src/chameleon/tests/inputs/086.xml +src/chameleon/tests/inputs/087-code-blocks.pt +src/chameleon/tests/inputs/087.xml +src/chameleon/tests/inputs/088-python-newlines.pt +src/chameleon/tests/inputs/088.xml +src/chameleon/tests/inputs/089.xml +src/chameleon/tests/inputs/090.xml +src/chameleon/tests/inputs/091.xml +src/chameleon/tests/inputs/092.xml +src/chameleon/tests/inputs/093.xml +src/chameleon/tests/inputs/094.xml +src/chameleon/tests/inputs/095.xml +src/chameleon/tests/inputs/096.xml +src/chameleon/tests/inputs/097.xml +src/chameleon/tests/inputs/098.xml +src/chameleon/tests/inputs/099.xml +src/chameleon/tests/inputs/100.xml +src/chameleon/tests/inputs/101-unclosed-tags.html +src/chameleon/tests/inputs/101.xml +src/chameleon/tests/inputs/102-unquoted-attributes.html +src/chameleon/tests/inputs/102.xml +src/chameleon/tests/inputs/103-simple-attribute.html +src/chameleon/tests/inputs/103.xml +src/chameleon/tests/inputs/104.xml +src/chameleon/tests/inputs/105.xml +src/chameleon/tests/inputs/106.xml +src/chameleon/tests/inputs/107.xml +src/chameleon/tests/inputs/108.xml +src/chameleon/tests/inputs/109.xml +src/chameleon/tests/inputs/110.xml +src/chameleon/tests/inputs/111.xml +src/chameleon/tests/inputs/112.xml +src/chameleon/tests/inputs/113.xml +src/chameleon/tests/inputs/114.xml +src/chameleon/tests/inputs/115.xml +src/chameleon/tests/inputs/116.xml +src/chameleon/tests/inputs/117.xml +src/chameleon/tests/inputs/118.xml +src/chameleon/tests/inputs/119.xml +src/chameleon/tests/inputs/greeting.pt +src/chameleon/tests/inputs/hello_world.pt +src/chameleon/tests/inputs/hello_world.txt +src/chameleon/tests/outputs/001.html +src/chameleon/tests/outputs/001.pt +src/chameleon/tests/outputs/001.txt +src/chameleon/tests/outputs/002.pt +src/chameleon/tests/outputs/003.pt +src/chameleon/tests/outputs/004.pt +src/chameleon/tests/outputs/005.pt +src/chameleon/tests/outputs/006.pt +src/chameleon/tests/outputs/007.pt +src/chameleon/tests/outputs/008.pt +src/chameleon/tests/outputs/009.pt +src/chameleon/tests/outputs/010.pt +src/chameleon/tests/outputs/011-en.pt +src/chameleon/tests/outputs/011.pt +src/chameleon/tests/outputs/012-en.pt +src/chameleon/tests/outputs/012.pt +src/chameleon/tests/outputs/013.pt +src/chameleon/tests/outputs/014.pt +src/chameleon/tests/outputs/015-en.pt +src/chameleon/tests/outputs/015.pt +src/chameleon/tests/outputs/016-en.pt +src/chameleon/tests/outputs/016.pt +src/chameleon/tests/outputs/017.pt +src/chameleon/tests/outputs/018-en.pt +src/chameleon/tests/outputs/018.pt +src/chameleon/tests/outputs/019.pt +src/chameleon/tests/outputs/020.pt +src/chameleon/tests/outputs/021-en.pt +src/chameleon/tests/outputs/021.pt +src/chameleon/tests/outputs/022.pt +src/chameleon/tests/outputs/023.pt +src/chameleon/tests/outputs/024.pt +src/chameleon/tests/outputs/025.pt +src/chameleon/tests/outputs/026.pt +src/chameleon/tests/outputs/027.pt +src/chameleon/tests/outputs/028.pt +src/chameleon/tests/outputs/029.pt +src/chameleon/tests/outputs/030.pt +src/chameleon/tests/outputs/031.pt +src/chameleon/tests/outputs/032.pt +src/chameleon/tests/outputs/033.pt +src/chameleon/tests/outputs/034.pt +src/chameleon/tests/outputs/035.pt +src/chameleon/tests/outputs/036.pt 
+src/chameleon/tests/outputs/037.pt +src/chameleon/tests/outputs/038.pt +src/chameleon/tests/outputs/039.pt +src/chameleon/tests/outputs/040.pt +src/chameleon/tests/outputs/041.pt +src/chameleon/tests/outputs/042.pt +src/chameleon/tests/outputs/043.pt +src/chameleon/tests/outputs/044.pt +src/chameleon/tests/outputs/045.pt +src/chameleon/tests/outputs/046.pt +src/chameleon/tests/outputs/047.pt +src/chameleon/tests/outputs/048.pt +src/chameleon/tests/outputs/049.pt +src/chameleon/tests/outputs/050.pt +src/chameleon/tests/outputs/051.pt +src/chameleon/tests/outputs/052.pt +src/chameleon/tests/outputs/053.pt +src/chameleon/tests/outputs/054.pt +src/chameleon/tests/outputs/055.pt +src/chameleon/tests/outputs/056.pt +src/chameleon/tests/outputs/057.pt +src/chameleon/tests/outputs/058.pt +src/chameleon/tests/outputs/059.pt +src/chameleon/tests/outputs/060.pt +src/chameleon/tests/outputs/061.pt +src/chameleon/tests/outputs/062.pt +src/chameleon/tests/outputs/063.pt +src/chameleon/tests/outputs/064.pt +src/chameleon/tests/outputs/065.pt +src/chameleon/tests/outputs/066.pt +src/chameleon/tests/outputs/067.pt +src/chameleon/tests/outputs/068.pt +src/chameleon/tests/outputs/069-en.pt +src/chameleon/tests/outputs/069.pt +src/chameleon/tests/outputs/070-en.pt +src/chameleon/tests/outputs/070.pt +src/chameleon/tests/outputs/071.pt +src/chameleon/tests/outputs/072.pt +src/chameleon/tests/outputs/073.pt +src/chameleon/tests/outputs/074.pt +src/chameleon/tests/outputs/075.pt +src/chameleon/tests/outputs/076.pt +src/chameleon/tests/outputs/077-en.pt +src/chameleon/tests/outputs/077.pt +src/chameleon/tests/outputs/078.pt +src/chameleon/tests/outputs/079-en.pt +src/chameleon/tests/outputs/079.pt +src/chameleon/tests/outputs/080.pt +src/chameleon/tests/outputs/081.pt +src/chameleon/tests/outputs/082.pt +src/chameleon/tests/outputs/083.pt +src/chameleon/tests/outputs/084.pt +src/chameleon/tests/outputs/085-en.pt +src/chameleon/tests/outputs/085.pt +src/chameleon/tests/outputs/086.pt +src/chameleon/tests/outputs/087.pt +src/chameleon/tests/outputs/088.pt +src/chameleon/tests/outputs/101.html +src/chameleon/tests/outputs/102.html +src/chameleon/tests/outputs/103.html +src/chameleon/tests/outputs/greeting.pt +src/chameleon/tests/outputs/hello_world.pt +src/chameleon/tests/outputs/hello_world.txt +src/chameleon/zpt/__init__.py +src/chameleon/zpt/loader.py +src/chameleon/zpt/program.py +src/chameleon/zpt/template.py \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/dependency_links.txt b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/dependency_links.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/dependency_links.txt @@ -0,0 +1,1 @@ + diff --git a/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/not-zip-safe b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/not-zip-safe new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/not-zip-safe @@ -0,0 +1,1 @@ + diff --git a/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/top_level.txt b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/top_level.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/Chameleon.egg-info/top_level.txt @@ -0,0 +1,1 @@ +chameleon diff --git a/lib3/Chameleon-2.9.2/src/chameleon/__init__.py b/lib3/Chameleon-2.9.2/src/chameleon/__init__.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/__init__.py @@ -0,0 +1,5 @@ +from .zpt.template import PageTemplate +from .zpt.template import PageTemplateFile +from .zpt.template import 
PageTextTemplate +from .zpt.template import PageTextTemplateFile +from .zpt.loader import TemplateLoader as PageTemplateLoader diff --git a/lib3/Chameleon-2.9.2/src/chameleon/ast24.py b/lib3/Chameleon-2.9.2/src/chameleon/ast24.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/ast24.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2008 by Armin Ronacher. +# License: Python License. +# + +import _ast + +from _ast import * + + +def fix_missing_locations(node): + """ + When you compile a node tree with compile(), the compiler expects lineno and + col_offset attributes for every node that supports them. This is rather + tedious to fill in for generated nodes, so this helper adds these attributes + recursively where not already set, by setting them to the values of the + parent node. It works recursively starting at *node*. + """ + def _fix(node, lineno, col_offset): + if 'lineno' in node._attributes: + if not hasattr(node, 'lineno'): + node.lineno = lineno + else: + lineno = node.lineno + if 'col_offset' in node._attributes: + if not hasattr(node, 'col_offset'): + node.col_offset = col_offset + else: + col_offset = node.col_offset + for child in iter_child_nodes(node): + _fix(child, lineno, col_offset) + _fix(node, 1, 0) + return node + + +def iter_child_nodes(node): + """ + Yield all direct child nodes of *node*, that is, all fields that are nodes + and all items of fields that are lists of nodes. + """ + for name, field in iter_fields(node): + if isinstance(field, (AST, _ast.AST)): + yield field + elif isinstance(field, list): + for item in field: + if isinstance(item, (AST, _ast.AST)): + yield item + + +def iter_fields(node): + """ + Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields`` + that is present on *node*. + """ + + for field in node._fields or (): + try: + yield field, getattr(node, field) + except AttributeError: + pass + + +def walk(node): + """ + Recursively yield all child nodes of *node*, in no specified order. This is + useful if you only want to modify nodes in place and don't care about the + context. + """ + from collections import deque + todo = deque([node]) + while todo: + node = todo.popleft() + todo.extend(iter_child_nodes(node)) + yield node + + +class NodeVisitor(object): + """ + A node visitor base class that walks the abstract syntax tree and calls a + visitor function for every node found. This function may return a value + which is forwarded by the `visit` method. + + This class is meant to be subclassed, with the subclass adding visitor + methods. + + Per default the visitor functions for the nodes are ``'visit_'`` + + class name of the node. So a `TryFinally` node visit function would + be `visit_TryFinally`. This behavior can be changed by overriding + the `visit` method. If no visitor function exists for a node + (return value `None`) the `generic_visit` visitor is used instead. + + Don't use the `NodeVisitor` if you want to apply changes to nodes during + traversing. For this a special visitor exists (`NodeTransformer`) that + allows modifications. 
+ """ + + def visit(self, node): + """Visit a node.""" + method = 'visit_' + node.__class__.__name__ + visitor = getattr(self, method, self.generic_visit) + return visitor(node) + + def generic_visit(self, node): + """Called if no explicit visitor function exists for a node.""" + for field, value in iter_fields(node): + if isinstance(value, list): + for item in value: + if isinstance(item, (AST, _ast.AST)): + self.visit(item) + elif isinstance(value, (AST, _ast.AST)): + self.visit(value) + + +class AST(object): + _fields = () + _attributes = 'lineno', 'col_offset' + + def __init__(self, *args, **kwargs): + self.__dict__.update(kwargs) + self._fields = self._fields or () + for name, value in zip(self._fields, args): + setattr(self, name, value) + + +for name, cls in _ast.__dict__.items(): + if isinstance(cls, type) and issubclass(cls, _ast.AST): + try: + cls.__bases__ = (AST, ) + cls.__bases__ + except TypeError: + pass + + +class ExceptHandler(AST): + _fields = "type", "name", "body" diff --git a/lib3/Chameleon-2.9.2/src/chameleon/astutil.py b/lib3/Chameleon-2.9.2/src/chameleon/astutil.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/astutil.py @@ -0,0 +1,926 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2008-2009 Edgewall Software +# All rights reserved. +# +# This software is licensed as described in the file COPYING, which +# you should have received as part of this distribution. The terms +# are also available at http://genshi.edgewall.org/wiki/License. +# +# This software consists of voluntary contributions made by many +# individuals. For the exact contribution history, see the revision +# history and logs, available at http://genshi.edgewall.org/log/. + +"""Support classes for generating code from abstract syntax trees.""" + +try: + import ast +except ImportError: + from chameleon import ast24 as ast + +import sys +import logging +import weakref + +node_annotations = weakref.WeakKeyDictionary() + +try: + node_annotations[ast.Name()] = None +except TypeError: + logging.debug( + "Unable to create weak references to AST nodes. " \ + "A lock will be used around compilation loop." + ) + + node_annotations = {} + +__docformat__ = 'restructuredtext en' + + +def annotated(value): + node = load("annotation") + node_annotations[node] = value + return node + + +def parse(source, mode='eval'): + return compile(source, '', mode, ast.PyCF_ONLY_AST) + + +def load(name): + return ast.Name(id=name, ctx=ast.Load()) + + +def store(name): + return ast.Name(id=name, ctx=ast.Store()) + + +def param(name): + return ast.Name(id=name, ctx=ast.Param()) + + +def delete(name): + return ast.Name(id=name, ctx=ast.Del()) + + +def subscript(name, value, ctx): + return ast.Subscript( + value=value, + slice=ast.Index(value=ast.Str(s=name)), + ctx=ctx, + ) + + +def walk_names(target, mode): + for node in ast.walk(target): + if isinstance(node, ast.Name) and \ + isinstance(node.ctx, mode): + yield node.id + + +def copy(source, target): + target.__class__ = source.__class__ + target.__dict__ = source.__dict__ + + +def swap(root, replacement, name): + for node in ast.walk(root): + if (isinstance(node, ast.Name) and + isinstance(node.ctx, ast.Load) and + node.id == name): + assert hasattr(replacement, '_fields') + node_annotations.setdefault(node, replacement) + + +def marker(name): + return ast.Str(s="__%s" % name) + + +class Node(object): + """AST baseclass that gives us a convenient initialization + method. 
We explicitly declare and use the ``_fields`` attribute.""" + + _fields = () + + def __init__(self, *args, **kwargs): + assert isinstance(self._fields, tuple) + self.__dict__.update(kwargs) + for name, value in zip(self._fields, args): + setattr(self, name, value) + + def __repr__(self): + """Poor man's single-line pretty printer.""" + + name = type(self).__name__ + return '<%s%s at %x>' % ( + name, + "".join(" %s=%r" % (name, getattr(self, name, "\"?\"")) + for name in self._fields), + id(self) + ) + + +class Builtin(Node): + """Represents a Python builtin. + + Used when a builtin is used internally by the compiler, to avoid + clashing with a user assignment (e.g. ``help`` is a builtin, but + also commonly assigned in templates). + """ + + _fields = "id", "ctx" + + ctx = ast.Load() + + +class Symbol(Node): + """Represents an importable symbol.""" + + _fields = "value", + + +class Static(Node): + """Represents a static value.""" + + _fields = "value", "name" + + name = None + + +class Comment(Node): + _fields = "text", "space", "stmt" + + stmt = None + space = "" + + +class ASTCodeGenerator(object): + """General purpose base class for AST transformations. + + Every visitor method can be overridden to return an AST node that has been + altered or replaced in some way. + """ + + def __init__(self, tree): + self.lines_info = [] + self.line_info = [] + self.lines = [] + self.line = "" + self.last = None + self.indent = 0 + self.blame_stack = [] + self.visit(tree) + + if self.line.strip(): + self._new_line() + + self.line = None + self.line_info = None + + # strip trivial lines + self.code = "\n".join( + line.strip() and line or "" + for line in self.lines + ) + + def _change_indent(self, delta): + self.indent += delta + + def _new_line(self): + if self.line is not None: + self.lines.append(self.line) + self.lines_info.append(self.line_info) + self.line = ' ' * 4 * self.indent + if len(self.blame_stack) == 0: + self.line_info = [] + self.last = None + else: + self.line_info = [(0, self.blame_stack[-1],)] + self.last = self.blame_stack[-1] + + def _write(self, s): + if len(s) == 0: + return + if len(self.blame_stack) == 0: + if self.last is not None: + self.last = None + self.line_info.append((len(self.line), self.last)) + else: + if self.last != self.blame_stack[-1]: + self.last = self.blame_stack[-1] + self.line_info.append((len(self.line), self.last)) + self.line += s + + def flush(self): + if self.line: + self._new_line() + + def visit(self, node): + if node is None: + return None + if type(node) is tuple: + return tuple([self.visit(n) for n in node]) + try: + self.blame_stack.append((node.lineno, node.col_offset,)) + info = True + except AttributeError: + info = False + visitor = getattr(self, 'visit_%s' % node.__class__.__name__, None) + if visitor is None: + raise Exception('No handler for ``%s`` (%s).' % ( + node.__class__.__name__, repr(node))) + ret = visitor(node) + if info: + self.blame_stack.pop() + return ret + + def visit_Module(self, node): + for n in node.body: + self.visit(n) + visit_Interactive = visit_Module + visit_Suite = visit_Module + + def visit_Expression(self, node): + return self.visit(node.body) + + # arguments = (expr* args, identifier? vararg, + # identifier? 
kwarg, expr* defaults) + def visit_arguments(self, node): + first = True + no_default_count = len(node.args) - len(node.defaults) + for i, arg in enumerate(node.args): + if not first: + self._write(', ') + else: + first = False + self.visit(arg) + if i >= no_default_count: + self._write('=') + self.visit(node.defaults[i - no_default_count]) + if getattr(node, 'vararg', None): + if not first: + self._write(', ') + else: + first = False + self._write('*' + node.vararg) + if getattr(node, 'kwarg', None): + if not first: + self._write(', ') + else: + first = False + self._write('**' + node.kwarg) + + def visit_arg(self, node): + self._write(node.arg) + + # FunctionDef(identifier name, arguments args, + # stmt* body, expr* decorators) + def visit_FunctionDef(self, node): + self._new_line() + for decorator in getattr(node, 'decorator_list', ()): + self._new_line() + self._write('@') + self.visit(decorator) + self._new_line() + self._write('def ' + node.name + '(') + self.visit(node.args) + self._write('):') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + + # ClassDef(identifier name, expr* bases, stmt* body) + def visit_ClassDef(self, node): + self._new_line() + self._write('class ' + node.name) + if node.bases: + self._write('(') + self.visit(node.bases[0]) + for base in node.bases[1:]: + self._write(', ') + self.visit(base) + self._write(')') + self._write(':') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + + # Return(expr? value) + def visit_Return(self, node): + self._new_line() + self._write('return') + if getattr(node, 'value', None): + self._write(' ') + self.visit(node.value) + + # Delete(expr* targets) + def visit_Delete(self, node): + self._new_line() + self._write('del ') + self.visit(node.targets[0]) + for target in node.targets[1:]: + self._write(', ') + self.visit(target) + + # Assign(expr* targets, expr value) + def visit_Assign(self, node): + self._new_line() + for target in node.targets: + self.visit(target) + self._write(' = ') + self.visit(node.value) + + # AugAssign(expr target, operator op, expr value) + def visit_AugAssign(self, node): + self._new_line() + self.visit(node.target) + self._write(' ' + self.binary_operators[node.op.__class__] + '= ') + self.visit(node.value) + + # Print(expr? 
dest, expr* values, bool nl) + def visit_Print(self, node): + self._new_line() + self._write('print') + if getattr(node, 'dest', None): + self._write(' >> ') + self.visit(node.dest) + if getattr(node, 'values', None): + self._write(', ') + else: + self._write(' ') + if getattr(node, 'values', None): + self.visit(node.values[0]) + for value in node.values[1:]: + self._write(', ') + self.visit(value) + if not node.nl: + self._write(',') + + # For(expr target, expr iter, stmt* body, stmt* orelse) + def visit_For(self, node): + self._new_line() + self._write('for ') + self.visit(node.target) + self._write(' in ') + self.visit(node.iter) + self._write(':') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + if getattr(node, 'orelse', None): + self._new_line() + self._write('else:') + self._change_indent(1) + for statement in node.orelse: + self.visit(statement) + self._change_indent(-1) + + # While(expr test, stmt* body, stmt* orelse) + def visit_While(self, node): + self._new_line() + self._write('while ') + self.visit(node.test) + self._write(':') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + if getattr(node, 'orelse', None): + self._new_line() + self._write('else:') + self._change_indent(1) + for statement in node.orelse: + self.visit(statement) + self._change_indent(-1) + + # If(expr test, stmt* body, stmt* orelse) + def visit_If(self, node): + self._new_line() + self._write('if ') + self.visit(node.test) + self._write(':') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + if getattr(node, 'orelse', None): + self._new_line() + self._write('else:') + self._change_indent(1) + for statement in node.orelse: + self.visit(statement) + self._change_indent(-1) + + # With(expr context_expr, expr? optional_vars, stmt* body) + def visit_With(self, node): + self._new_line() + self._write('with ') + self.visit(node.context_expr) + if getattr(node, 'optional_vars', None): + self._write(' as ') + self.visit(node.optional_vars) + self._write(':') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + + # Raise(expr? type, expr? inst, expr? 
tback) + def visit_Raise(self, node): + self._new_line() + self._write('raise') + if not getattr(node, "type", None): + exc = getattr(node, "exc", None) + if exc is None: + return + self._write(' ') + return self.visit(exc) + self._write(' ') + self.visit(node.type) + if not node.inst: + return + self._write(', ') + self.visit(node.inst) + if not node.tback: + return + self._write(', ') + self.visit(node.tback) + + # Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) + def visit_Try(self, node): + self._new_line() + self._write('try:') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + if getattr(node, 'handlers', None): + for handler in node.handlers: + self.visit(handler) + self._new_line() + + if getattr(node, 'orelse', None): + self._write('else:') + self._change_indent(1) + for statement in node.orelse: + self.visit(statement) + self._change_indent(-1) + + if getattr(node, 'finalbody', None): + self._new_line() + self._write('finally:') + self._change_indent(1) + for statement in node.finalbody: + self.visit(statement) + self._change_indent(-1) + + # TryExcept(stmt* body, excepthandler* handlers, stmt* orelse) + def visit_TryExcept(self, node): + self._new_line() + self._write('try:') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + if getattr(node, 'handlers', None): + for handler in node.handlers: + self.visit(handler) + self._new_line() + if getattr(node, 'orelse', None): + self._write('else:') + self._change_indent(1) + for statement in node.orelse: + self.visit(statement) + self._change_indent(-1) + + # excepthandler = (expr? type, expr? name, stmt* body) + def visit_ExceptHandler(self, node): + self._new_line() + self._write('except') + if getattr(node, 'type', None): + self._write(' ') + self.visit(node.type) + if getattr(node, 'name', None): + if sys.version_info[0] == 2: + assert getattr(node, 'type', None) + self._write(', ') + else: + self._write(' as ') + self.visit(node.name) + self._write(':') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + visit_excepthandler = visit_ExceptHandler + + # TryFinally(stmt* body, stmt* finalbody) + def visit_TryFinally(self, node): + self._new_line() + self._write('try:') + self._change_indent(1) + for statement in node.body: + self.visit(statement) + self._change_indent(-1) + + if getattr(node, 'finalbody', None): + self._new_line() + self._write('finally:') + self._change_indent(1) + for statement in node.finalbody: + self.visit(statement) + self._change_indent(-1) + + # Assert(expr test, expr? msg) + def visit_Assert(self, node): + self._new_line() + self._write('assert ') + self.visit(node.test) + if getattr(node, 'msg', None): + self._write(', ') + self.visit(node.msg) + + def visit_alias(self, node): + self._write(node.name) + if getattr(node, 'asname', None): + self._write(' as ') + self._write(node.asname) + + # Import(alias* names) + def visit_Import(self, node): + self._new_line() + self._write('import ') + self.visit(node.names[0]) + for name in node.names[1:]: + self._write(', ') + self.visit(name) + + # ImportFrom(identifier module, alias* names, int? level) + def visit_ImportFrom(self, node): + self._new_line() + self._write('from ') + if node.level: + self._write('.' 
* node.level) + self._write(node.module) + self._write(' import ') + self.visit(node.names[0]) + for name in node.names[1:]: + self._write(', ') + self.visit(name) + + # Exec(expr body, expr? globals, expr? locals) + def visit_Exec(self, node): + self._new_line() + self._write('exec ') + self.visit(node.body) + if not node.globals: + return + self._write(', ') + self.visit(node.globals) + if not node.locals: + return + self._write(', ') + self.visit(node.locals) + + # Global(identifier* names) + def visit_Global(self, node): + self._new_line() + self._write('global ') + self.visit(node.names[0]) + for name in node.names[1:]: + self._write(', ') + self.visit(name) + + # Expr(expr value) + def visit_Expr(self, node): + self._new_line() + self.visit(node.value) + + # Pass + def visit_Pass(self, node): + self._new_line() + self._write('pass') + + # Break + def visit_Break(self, node): + self._new_line() + self._write('break') + + # Continue + def visit_Continue(self, node): + self._new_line() + self._write('continue') + + ### EXPRESSIONS + def with_parens(f): + def _f(self, node): + self._write('(') + f(self, node) + self._write(')') + return _f + + bool_operators = {ast.And: 'and', ast.Or: 'or'} + + # BoolOp(boolop op, expr* values) + @with_parens + def visit_BoolOp(self, node): + joiner = ' ' + self.bool_operators[node.op.__class__] + ' ' + self.visit(node.values[0]) + for value in node.values[1:]: + self._write(joiner) + self.visit(value) + + binary_operators = { + ast.Add: '+', + ast.Sub: '-', + ast.Mult: '*', + ast.Div: '/', + ast.Mod: '%', + ast.Pow: '**', + ast.LShift: '<<', + ast.RShift: '>>', + ast.BitOr: '|', + ast.BitXor: '^', + ast.BitAnd: '&', + ast.FloorDiv: '//' + } + + # BinOp(expr left, operator op, expr right) + @with_parens + def visit_BinOp(self, node): + self.visit(node.left) + self._write(' ' + self.binary_operators[node.op.__class__] + ' ') + self.visit(node.right) + + unary_operators = { + ast.Invert: '~', + ast.Not: 'not', + ast.UAdd: '+', + ast.USub: '-', + } + + # UnaryOp(unaryop op, expr operand) + def visit_UnaryOp(self, node): + self._write(self.unary_operators[node.op.__class__] + ' ') + self.visit(node.operand) + + # Lambda(arguments args, expr body) + @with_parens + def visit_Lambda(self, node): + self._write('lambda ') + self.visit(node.args) + self._write(': ') + self.visit(node.body) + + # IfExp(expr test, expr body, expr orelse) + @with_parens + def visit_IfExp(self, node): + self.visit(node.body) + self._write(' if ') + self.visit(node.test) + self._write(' else ') + self.visit(node.orelse) + + # Dict(expr* keys, expr* values) + def visit_Dict(self, node): + self._write('{') + for key, value in zip(node.keys, node.values): + self.visit(key) + self._write(': ') + self.visit(value) + self._write(', ') + self._write('}') + + def visit_Set(self, node): + self._write('{') + elts = list(node.elts) + last = elts.pop() + for elt in elts: + self.visit(elt) + self._write(', ') + self.visit(last) + self._write('}') + + # ListComp(expr elt, comprehension* generators) + def visit_ListComp(self, node): + self._write('[') + self.visit(node.elt) + for generator in node.generators: + # comprehension = (expr target, expr iter, expr* ifs) + self._write(' for ') + self.visit(generator.target) + self._write(' in ') + self.visit(generator.iter) + for ifexpr in generator.ifs: + self._write(' if ') + self.visit(ifexpr) + self._write(']') + + # GeneratorExp(expr elt, comprehension* generators) + def visit_GeneratorExp(self, node): + self._write('(') + self.visit(node.elt) + for 
generator in node.generators: + # comprehension = (expr target, expr iter, expr* ifs) + self._write(' for ') + self.visit(generator.target) + self._write(' in ') + self.visit(generator.iter) + for ifexpr in generator.ifs: + self._write(' if ') + self.visit(ifexpr) + self._write(')') + + # Yield(expr? value) + def visit_Yield(self, node): + self._write('yield') + if getattr(node, 'value', None): + self._write(' ') + self.visit(node.value) + + comparison_operators = { + ast.Eq: '==', + ast.NotEq: '!=', + ast.Lt: '<', + ast.LtE: '<=', + ast.Gt: '>', + ast.GtE: '>=', + ast.Is: 'is', + ast.IsNot: 'is not', + ast.In: 'in', + ast.NotIn: 'not in', + } + + # Compare(expr left, cmpop* ops, expr* comparators) + @with_parens + def visit_Compare(self, node): + self.visit(node.left) + for op, comparator in zip(node.ops, node.comparators): + self._write(' ' + self.comparison_operators[op.__class__] + ' ') + self.visit(comparator) + + # Call(expr func, expr* args, keyword* keywords, + # expr? starargs, expr? kwargs) + def visit_Call(self, node): + self.visit(node.func) + self._write('(') + first = True + for arg in node.args: + if not first: + self._write(', ') + first = False + self.visit(arg) + + for keyword in node.keywords: + if not first: + self._write(', ') + first = False + # keyword = (identifier arg, expr value) + self._write(keyword.arg) + self._write('=') + self.visit(keyword.value) + if getattr(node, 'starargs', None): + if not first: + self._write(', ') + first = False + self._write('*') + self.visit(node.starargs) + + if getattr(node, 'kwargs', None): + if not first: + self._write(', ') + first = False + self._write('**') + self.visit(node.kwargs) + self._write(')') + + # Repr(expr value) + def visit_Repr(self, node): + self._write('`') + self.visit(node.value) + self._write('`') + + # Num(object n) + def visit_Num(self, node): + self._write(repr(node.n)) + + # Str(string s) + def visit_Str(self, node): + self._write(repr(node.s)) + + # Attribute(expr value, identifier attr, expr_context ctx) + def visit_Attribute(self, node): + self.visit(node.value) + self._write('.') + self._write(node.attr) + + # Subscript(expr value, slice slice, expr_context ctx) + def visit_Subscript(self, node): + self.visit(node.value) + self._write('[') + + def _process_slice(node): + if isinstance(node, ast.Ellipsis): + self._write('...') + elif isinstance(node, ast.Slice): + if getattr(node, 'lower', 'None'): + self.visit(node.lower) + self._write(':') + if getattr(node, 'upper', None): + self.visit(node.upper) + if getattr(node, 'step', None): + self._write(':') + self.visit(node.step) + elif isinstance(node, ast.Index): + self.visit(node.value) + elif isinstance(node, ast.ExtSlice): + self.visit(node.dims[0]) + for dim in node.dims[1:]: + self._write(', ') + self.visit(dim) + else: + raise NotImplemented('Slice type not implemented') + _process_slice(node.slice) + self._write(']') + + # Name(identifier id, expr_context ctx) + def visit_Name(self, node): + self._write(node.id) + + # List(expr* elts, expr_context ctx) + def visit_List(self, node): + self._write('[') + for elt in node.elts: + self.visit(elt) + self._write(', ') + self._write(']') + + # Tuple(expr *elts, expr_context ctx) + def visit_Tuple(self, node): + self._write('(') + for elt in node.elts: + self.visit(elt) + self._write(', ') + self._write(')') + + +class AnnotationAwareVisitor(ast.NodeVisitor): + def visit(self, node): + annotation = node_annotations.get(node) + if annotation is not None: + assert hasattr(annotation, '_fields') + node = 
annotation + + super(AnnotationAwareVisitor, self).visit(node) + + def apply_transform(self, node): + if node not in node_annotations: + result = self.transform(node) + if result is not None and result is not node: + node_annotations[node] = result + + +class NameLookupRewriteVisitor(AnnotationAwareVisitor): + def __init__(self, transform): + self.transform = transform + self.transformed = set() + self.scopes = [set()] + + def __call__(self, node): + self.visit(node) + return self.transformed + + def visit_Name(self, node): + scope = self.scopes[-1] + if isinstance(node.ctx, ast.Param): + scope.add(node.id) + elif node.id not in scope: + self.transformed.add(node.id) + self.apply_transform(node) + + def visit_FunctionDef(self, node): + self.scopes[-1].add(node.name) + + def visit_alias(self, node): + name = node.asname if node.asname is not None else node.name + self.scopes[-1].add(name) + + def visit_Lambda(self, node): + self.scopes.append(set()) + try: + self.visit(node.args) + self.visit(node.body) + finally: + self.scopes.pop() + + +class ItemLookupOnAttributeErrorVisitor(AnnotationAwareVisitor): + def __init__(self, transform): + self.transform = transform + + def visit_Attribute(self, node): + self.generic_visit(node) + self.apply_transform(node) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/benchmark.py b/lib3/Chameleon-2.9.2/src/chameleon/benchmark.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/benchmark.py @@ -0,0 +1,478 @@ +import unittest +import time +import os +import re +from .utils import text_ + +re_amp = re.compile(r'&(?!([A-Za-z]+|#[0-9]+);)') + +BIGTABLE_ZPT = """\ +<table xmlns="http://www.w3.org/1999/xhtml" +xmlns:tal="http://xml.zope.org/namespaces/tal"> +<tr tal:repeat="row python: options['table']"> +<td tal:repeat="c python: row.values()"> +<span tal:define="d python: c + 1" +tal:attributes="class python: 'column-' + str(d)" +tal:content="python: d" /> +</td> +</tr> +</table>""" + +MANY_STRINGS_ZPT = """\ +<table xmlns="http://www.w3.org/1999/xhtml" +xmlns:tal="http://xml.zope.org/namespaces/tal"> +<tr tal:repeat="i python: xrange(1000)"> +<td tal:content="string: number ${i}" /> +</tr> +</table> +""" + +HELLO_WORLD_ZPT = """\ +<html xmlns="http://www.w3.org/1999/xhtml" +xmlns:tal="http://xml.zope.org/namespaces/tal"> +<body> +<h1>Hello, world!</h1> +</body> +</html> +""" + +I18N_ZPT = """\ +<html xmlns="http://www.w3.org/1999/xhtml" +xmlns:tal="http://xml.zope.org/namespaces/tal" +xmlns:i18n="http://xml.zope.org/namespaces/i18n"> + <body> + <div tal:repeat="i python: xrange(10)"> + <div i18n:translate=""> + Hello world! + </div> + <div i18n:translate="hello_world"> + Hello world! 
+ </div> + <div i18n:translate=""> + <sup>Hello world!</sup> + </div> + </div> + </body> +</html> +""" + + +def benchmark(title): + def decorator(f): + def wrapper(*args): + print( + "==========================\n " \ + "%s\n==========================" % \ + title) + return f(*args) + return wrapper + return decorator + + +def timing(func, *args, **kwargs): + t1 = t2 = time.time() + i = 0 + while t2 - t1 < 3: + func(**kwargs) + func(**kwargs) + func(**kwargs) + func(**kwargs) + i += 4 + t2 = time.time() + return float(10 * (t2 - t1)) / i + + +START = 0 +END = 1 +TAG = 2 + + +def yield_tokens(table=None): + index = [] + tag = index.append + _re_amp = re_amp + tag(START) + yield "<", "html", "", ">\n" + for r in table: + tag(START) + yield "<", "tr", "", ">\n" + + for c in r.values(): + d = c + 1 + tag(START) + yield "<", "td", "", ">\n" + + _tmp5 = d + if not isinstance(_tmp5, unicode): + _tmp5 = str(_tmp5) + if ('&' in _tmp5): + if (';' in _tmp5): + _tmp5 = _re_amp.sub('&', _tmp5) + else: + _tmp5 = _tmp5.replace('&', '&') + if ('<' in _tmp5): + _tmp5 = _tmp5.replace('<', '<') + if ('>' in _tmp5): + _tmp5 = _tmp5.replace('>', '>') + if ('"' in _tmp5): + _tmp5 = _tmp5.replace('"', '"') + _tmp5 = "column-%s" % _tmp5 + + _tmp = d + if (_tmp.__class__ not in (str, unicode, int, float, )): + raise + if (_tmp is not None): + if not isinstance(_tmp, unicode): + _tmp = str(_tmp) + if ('&' in _tmp): + if (';' in _tmp): + _tmp = _re_amp.sub('&', _tmp) + else: + _tmp = _tmp.replace('&', '&') + if ('<' in _tmp): + _tmp = _tmp.replace('<', '<') + if ('>' in _tmp): + _tmp = _tmp.replace('>', '>') + tag(START) + + t = ["classicism"] + + yield "<", "span", " ", t[0], '="', _tmp5, '"', ">\n" + tag(END) + yield "</", "span", ">\n" + tag(END) + yield "</", "td", ">\n" + tag(END) + yield "</", "tr", ">\n" + tag(END) + yield "</", "html", ">\n" + + +def yield_tokens_dict_version(**kwargs): + index = [] + tag = index.append + _re_amp = re_amp + tag(START) + yield "<", "html", "", ">\n" + + for r in kwargs['table']: + kwargs['r'] = r + tag(START) + yield "<", "tr", "", ">\n" + + for c in kwargs['r'].values(): + kwargs['d'] = c + 1 + tag(START) + yield "<", "td", "", ">\n" + + _tmp5 = kwargs['d'] + if not isinstance(_tmp5, unicode): + _tmp5 = str(_tmp5) + if ('&' in _tmp5): + if (';' in _tmp5): + _tmp5 = _re_amp.sub('&', _tmp5) + else: + _tmp5 = _tmp5.replace('&', '&') + if ('<' in _tmp5): + _tmp5 = _tmp5.replace('<', '<') + if ('>' in _tmp5): + _tmp5 = _tmp5.replace('>', '>') + if ('"' in _tmp5): + _tmp5 = _tmp5.replace('"', '"') + _tmp5 = "column-%s" % _tmp5 + + _tmp = kwargs['d'] + if (_tmp.__class__ not in (str, unicode, int, float, )): + raise + if (_tmp is not None): + if not isinstance(_tmp, unicode): + _tmp = str(_tmp) + if ('&' in _tmp): + if (';' in _tmp): + _tmp = _re_amp.sub('&', _tmp) + else: + _tmp = _tmp.replace('&', '&') + if ('<' in _tmp): + _tmp = _tmp.replace('<', '<') + if ('>' in _tmp): + _tmp = _tmp.replace('>', '>') + tag(START) + + t = ["classicism"] + + yield "<", "span", " ", t[0], '="', _tmp5, '"', ">\n" + tag(END) + yield "</", "span", ">\n" + tag(END) + yield "</", "td", ">\n" + tag(END) + yield "</", "tr", ">\n" + tag(END) + yield "</", "html", ">\n" + + +def yield_stream(table=None): + _re_amp = re_amp + yield START, ("html", "", "\n"), None + for r in table: + yield START, ("tr", "", "\n"), None + + for c in r.values(): + d = c + 1 + yield START, ("td", "", "\n"), None + + _tmp5 = d + if not isinstance(_tmp5, unicode): + _tmp5 = str(_tmp5) + if ('&' in _tmp5): + if (';' in _tmp5): + 
_tmp5 = _re_amp.sub('&', _tmp5) + else: + _tmp5 = _tmp5.replace('&', '&') + if ('<' in _tmp5): + _tmp5 = _tmp5.replace('<', '<') + if ('>' in _tmp5): + _tmp5 = _tmp5.replace('>', '>') + if ('"' in _tmp5): + _tmp5 = _tmp5.replace('"', '"') + _tmp5 = "column-%s" % _tmp5 + + _tmp = d + if (_tmp.__class__ not in (str, unicode, int, float, )): + raise + if (_tmp is not None): + if not isinstance(_tmp, unicode): + _tmp = str(_tmp) + if ('&' in _tmp): + if (';' in _tmp): + _tmp = _re_amp.sub('&', _tmp) + else: + _tmp = _tmp.replace('&', '&') + if ('<' in _tmp): + _tmp = _tmp.replace('<', '<') + if ('>' in _tmp): + _tmp = _tmp.replace('>', '>') + yield START, ("span", "", _tmp, " ", "class", _tmp5), None + + yield END, ("span", "", "\n"), None + yield END, ("td", "", "\n"), None + yield END, ("tr", "", "\n"), None + yield END, ("html", "", "\n"), None + +from itertools import chain + + +def bigtable_python_tokens(table=None, renderer=None): + iterable = renderer(table=table) + stream = chain(*iterable) + return "".join(stream) + + +def bigtable_python_stream(table=None, renderer=None): + stream = renderer(table=table) + return "".join(stream_output(stream)) + + +def bigtable_python_stream_with_filter(table=None, renderer=None): + stream = renderer(table=table) + return "".join(stream_output(uppercase_filter(stream))) + + +def uppercase_filter(stream): + for kind, data, pos in stream: + if kind is START: + data = (data[0], data[1], data[2].upper(),) + data[3:] + elif kind is END: + data = (data[0], data[1], data[2].upper()) + elif kind is TAG: + raise NotImplemented + yield kind, data, pos + + +def stream_output(stream): + for kind, data, pos in stream: + if kind is START: + tag = data[0] + yield "<%s" % tag + l = len(data) + + # optimize for common cases + if l == 3: + pass + elif l == 6: + yield '%s%s="%s"' % (data[3], data[4], data[5]) + else: + i = 3 + while i < l: + yield '%s%s="%s"' % (data[i], data[i + 1], data[i + 2]) + i += 3 + yield "%s>%s" % (data[1], data[2]) + elif kind is END: + yield "</%s%s>%s" % data + elif kind is TAG: + raise NotImplemented + + +class Benchmarks(unittest.TestCase): + table = [dict(a=1, b=2, c=3, d=4, e=5, f=6, g=7, h=8, i=9, j=10) \ + for x in range(1000)] + + def setUp(self): + # set up i18n component + from zope.i18n import translate + from zope.i18n.interfaces import INegotiator + from zope.i18n.interfaces import ITranslationDomain + from zope.i18n.negotiator import Negotiator + from zope.i18n.simpletranslationdomain import SimpleTranslationDomain + from zope.i18n.tests.test_negotiator import Env + from zope.tales.tales import Context + + self.env = Env(('klingon', 'da', 'en', 'fr', 'no')) + + class ZopeI18NContext(Context): + + def translate(self, msgid, domain=None, context=None, + mapping=None, default=None): + context = self.vars['options']['env'] + return translate(msgid, domain, mapping, + context=context, default=default) + + def _getContext(self, contexts=None, **kwcontexts): + if contexts is not None: + if kwcontexts: + kwcontexts.update(contexts) + else: + kwcontexts = contexts + return ZopeI18NContext(self, kwcontexts) + + def _pt_getEngineContext(namespace): + self = namespace['template'] + engine = self.pt_getEngine() + return _getContext(engine, namespace) + + import zope.component + zope.component.provideUtility(Negotiator(), INegotiator) + catalog = SimpleTranslationDomain('domain') + zope.component.provideUtility(catalog, ITranslationDomain, 'domain') + self.files = os.path.abspath(os.path.join(__file__, '..', 'input')) + + @staticmethod + def 
_chameleon(body, **kwargs): + from .zpt.template import PageTemplate + return PageTemplate(body, **kwargs) + + @staticmethod + def _zope(body): + from zope.pagetemplate.pagetemplatefile import PageTemplate + template = PageTemplate() + template.pt_edit(body, 'text/xhtml') + return template + + @benchmark(text_("BIGTABLE [python]")) + def test_bigtable(self): + options = {'table': self.table} + + t_chameleon = timing(self._chameleon(BIGTABLE_ZPT), options=options) + print("chameleon: %7.2f" % t_chameleon) + + t_chameleon_utf8 = timing( + self._chameleon(BIGTABLE_ZPT, encoding='utf-8'), options=options) + print("chameleon (utf-8): %7.2f" % t_chameleon_utf8) + + t_tokens = timing( + bigtable_python_tokens, table=self.table, renderer=yield_tokens) + print("token: %7.2f" % t_tokens) + + t_tokens_dict_version = timing( + bigtable_python_tokens, table=self.table, + renderer=yield_tokens_dict_version) + print("token (dict): %7.2f" % t_tokens_dict_version) + + t_stream = timing( + bigtable_python_stream, table=self.table, renderer=yield_stream) + print("stream: %7.2f" % t_stream) + + t_zope = timing(self._zope(BIGTABLE_ZPT), table=self.table) + print("zope.pagetemplate: %7.2f" % t_zope) + print(" %7.1fX" % (t_zope / t_chameleon)) + + print("--------------------------") + print("check: %d vs %d" % ( + len(self._chameleon(BIGTABLE_ZPT)(options=options)), + len(self._zope(BIGTABLE_ZPT)(table=self.table)))) + print("--------------------------") + + @benchmark(text_("MANY STRINGS [python]")) + def test_many_strings(self): + t_chameleon = timing(self._chameleon(MANY_STRINGS_ZPT)) + print("chameleon: %7.2f" % t_chameleon) + t_zope = timing(self._zope(MANY_STRINGS_ZPT)) + print("zope.pagetemplate: %7.2f" % t_zope) + print(" %7.1fX" % (t_zope / t_chameleon)) + + print("--------------------------") + print("check: %d vs %d" % ( + len(self._chameleon(MANY_STRINGS_ZPT)()), + len(self._zope(MANY_STRINGS_ZPT)()))) + print("--------------------------") + + @benchmark(text_("HELLO WORLD")) + def test_hello_world(self): + t_chameleon = timing(self._chameleon(HELLO_WORLD_ZPT)) * 1000 + print("chameleon: %7.2f" % t_chameleon) + t_zope = timing(self._zope(HELLO_WORLD_ZPT)) * 1000 + print("zope.pagetemplate: %7.2f" % t_zope) + print(" %7.1fX" % (t_zope / t_chameleon)) + + print("--------------------------") + print("check: %d vs %d" % ( + len(self._chameleon(HELLO_WORLD_ZPT)()), + len(self._zope(HELLO_WORLD_ZPT)()))) + print("--------------------------") + + @benchmark(text_("I18N")) + def test_i18n(self): + from zope.i18n import translate + t_chameleon = timing( + self._chameleon(I18N_ZPT), + translate=translate, + language="klingon") * 1000 + print("chameleon: %7.2f" % t_chameleon) + t_zope = timing(self._zope(I18N_ZPT), env=self.env) * 1000 + print("zope.pagetemplate: %7.2f" % t_zope) + print(" %7.1fX" % (t_zope / t_chameleon)) + + @benchmark(text_("COMPILATION")) + def test_compilation(self): + template = self._chameleon(HELLO_WORLD_ZPT) + + def chameleon_cook_and_render(template=template): + template.cook(HELLO_WORLD_ZPT) + template() + + t_chameleon = timing(chameleon_cook_and_render) * 1000 + print("chameleon: %7.2f" % t_chameleon) + + template = self._zope(HELLO_WORLD_ZPT) + + def zope_cook_and_render(templte=template): + template._cook() + template() + + t_zope = timing(zope_cook_and_render) * 1000 + print("zope.pagetemplate: %7.2f" % t_zope) + print(" %0.3fX" % (t_zope / t_chameleon)) + + +def start(): + result = unittest.TestResult() + test = unittest.makeSuite(Benchmarks) + test.run(result) + + for error in 
result.errors: + print("Error in %s...\n" % error[0]) + print(error[1]) + + for failure in result.failures: + print("Failure in %s...\n" % failure[0]) + print(failure[1]) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/codegen.py b/lib3/Chameleon-2.9.2/src/chameleon/codegen.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/codegen.py @@ -0,0 +1,221 @@ +try: + import ast +except ImportError: + from chameleon import ast24 as ast + +import inspect +import textwrap +import types +import copy + +try: + import __builtin__ as builtins +except ImportError: + import builtins + +reverse_builtin_map = {} +for name, value in builtins.__dict__.items(): + try: + hash(value) + except TypeError: + continue + + reverse_builtin_map[value] = name + +try: + basestring +except NameError: + basestring = str + +from .astutil import ASTCodeGenerator +from .astutil import load +from .astutil import store +from .astutil import parse +from .astutil import Builtin +from .astutil import Symbol +from .astutil import node_annotations + +from .exc import CompilationError + + +try: + NATIVE_NUMBERS = int, float, long, bool +except NameError: + NATIVE_NUMBERS = int, float, bool + + +def template(function, mode='exec', **kw): + def wrapper(*vargs, **kwargs): + symbols = dict(zip(args, vargs + defaults)) + symbols.update(kwargs) + + class Visitor(ast.NodeVisitor): + def visit_Name(self, node): + value = symbols.get(node.id, self) + if value is not self: + if isinstance(value, basestring): + value = load(value) + if isinstance(value, type) or value in reverse_builtin_map: + name = reverse_builtin_map.get(value) + if name is not None: + value = Builtin(name) + else: + value = Symbol(value) + + assert node not in node_annotations + assert hasattr(value, '_fields') + node_annotations[node] = value + + expr = parse(source, mode=mode) + if not isinstance(function, basestring): + expr = expr.body[0] + + Visitor().visit(expr) + return expr.body + + if isinstance(function, basestring): + source = function + defaults = args = () + return wrapper(**kw) + + source = textwrap.dedent(inspect.getsource(function)) + argspec = inspect.getargspec(function) + args = argspec[0] + defaults = argspec[3] or () + return wrapper + + +class TemplateCodeGenerator(ASTCodeGenerator): + """Extends the standard Python code generator class with handlers + for the helper node classes: + + - Symbol (an importable value) + - Static (value that can be made global) + - Builtin (from the builtins module) + - Marker (short-hand for a unique static object) + + """ + + names = () + + def __init__(self, tree): + self.imports = {} + self.defines = {} + self.markers = {} + + # Generate code + super(TemplateCodeGenerator, self).__init__(tree) + + def visit_Module(self, node): + super(TemplateCodeGenerator, self).visit_Module(node) + + # Make sure we terminate the line printer + self.flush() + + # Clear lines array for import visits + body = self.lines + self.lines = [] + + while self.defines: + name, node = self.defines.popitem() + assignment = ast.Assign(targets=[store(name)], value=node) + self.visit(assignment) + + # Make sure we terminate the line printer + self.flush() + + # Clear lines array for import visits + defines = self.lines + self.lines = [] + + while self.imports: + value, node = self.imports.popitem() + + if isinstance(value, types.ModuleType): + stmt = ast.Import( + names=[ast.alias(name=value.__name__, asname=node.id)]) + elif hasattr(value, '__name__'): + path = reverse_builtin_map.get(value) + if path is None: + path = 
value.__module__ + name = value.__name__ + stmt = ast.ImportFrom( + module=path, + names=[ast.alias(name=name, asname=node.id)], + level=0, + ) + else: + raise TypeError(value) + + self.visit(stmt) + + # Clear last import + self.flush() + + # Stich together lines + self.lines += defines + body + + def define(self, name, node): + assert node is not None + value = self.defines.get(name) + + if value is node: + pass + elif value is None: + self.defines[name] = node + else: + raise CompilationError( + "Duplicate symbol name for define.", name) + + return load(name) + + def require(self, value): + if value is None: + return load("None") + + if isinstance(value, NATIVE_NUMBERS): + return ast.Num(value) + + node = self.imports.get(value) + if node is None: + # we come up with a unique symbol based on the class name + name = "_%s" % getattr(value, '__name__', str(value)).\ + rsplit('.', 1)[-1] + node = load(name) + self.imports[value] = store(node.id) + + return node + + def visit(self, node): + annotation = node_annotations.get(node) + if annotation is None: + super(TemplateCodeGenerator, self).visit(node) + else: + self.visit(annotation) + + def visit_Comment(self, node): + if node.stmt is None: + self._new_line() + else: + self.visit(node.stmt) + + for line in node.text.replace('\r', '\n').split('\n'): + self._new_line() + self._write("%s#%s" % (node.space, line)) + + def visit_Builtin(self, node): + name = load(node.id) + self.visit(name) + + def visit_Symbol(self, node): + node = self.require(node.value) + self.visit(node) + + def visit_Static(self, node): + if node.name is None: + name = "_static_%d" % id(node.value) + else: + name = node.name + + node = self.define(name, node.value) + self.visit(node) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/compiler.py b/lib3/Chameleon-2.9.2/src/chameleon/compiler.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/compiler.py @@ -0,0 +1,1553 @@ +import re +import sys +import itertools +import logging +import threading +import functools +import collections +import pickle +import textwrap + +from .astutil import load +from .astutil import store +from .astutil import param +from .astutil import swap +from .astutil import subscript +from .astutil import node_annotations +from .astutil import annotated +from .astutil import NameLookupRewriteVisitor +from .astutil import Comment +from .astutil import Symbol +from .astutil import Builtin + +from .codegen import TemplateCodeGenerator +from .codegen import template + +from .tal import ErrorInfo +from .tal import NAME +from .i18n import fast_translate + +from .nodes import Text +from .nodes import Value +from .nodes import Substitution +from .nodes import Assignment +from .nodes import Module +from .nodes import Context + +from .tokenize import Token +from .config import DEBUG_MODE +from .exc import TranslationError +from .exc import ExpressionError +from .parser import groupdict + +from .utils import DebuggingOutputStream +from .utils import char2entity +from .utils import ListDictProxy +from .utils import native_string +from .utils import byte_string +from .utils import string_type +from .utils import unicode_string +from .utils import version +from .utils import ast +from .utils import safe_native +from .utils import builtins +from .utils import decode_htmlentities + + +if version >= (3, 0, 0): + long = int + +log = logging.getLogger('chameleon.compiler') + +COMPILER_INTERNALS_OR_DISALLOWED = set([ + "econtext", + "rcontext", + "str", + "int", + "float", + "long", + "len", + 
"None", + "True", + "False", + "RuntimeError", + ]) + + +RE_MANGLE = re.compile('[^\w_]') +RE_NAME = re.compile('^%s$' % NAME) + +if DEBUG_MODE: + LIST = template("cls()", cls=DebuggingOutputStream, mode="eval") +else: + LIST = template("[]", mode="eval") + + +def identifier(prefix, suffix=None): + return "__%s_%s" % (prefix, mangle(suffix or id(prefix))) + + +def mangle(string): + return RE_MANGLE.sub('_', str(string)).replace('\n', '') + + +def load_econtext(name): + return template("getitem(KEY)", KEY=ast.Str(s=name), mode="eval") + + +def store_econtext(name): + name = native_string(name) + return subscript(name, load("econtext"), ast.Store()) + + +def store_rcontext(name): + name = native_string(name) + return subscript(name, load("rcontext"), ast.Store()) + + +def set_error(token, exception): + try: + line, column = token.location + filename = token.filename + except AttributeError: + line, column = 0, 0 + filename = "<string>" + + string = safe_native(token) + + return template( + "rcontext.setdefault('__error__', [])." + "append((string, line, col, src, exc))", + string=ast.Str(s=string), + line=ast.Num(n=line), + col=ast.Num(n=column), + src=ast.Str(s=filename), + sys=Symbol(sys), + exc=exception, + ) + + +def try_except_wrap(stmts, token): + exception = template( + "exc_info()[1]", exc_info=Symbol(sys.exc_info), mode="eval" + ) + + body = set_error(token, exception) + template("raise") + + return ast.TryExcept( + body=stmts, + handlers=[ast.ExceptHandler(body=body)], + ) + + + at template +def emit_node(node): # pragma: no cover + __append(node) + + + at template +def emit_node_if_non_trivial(node): # pragma: no cover + if node is not None: + __append(node) + + + at template +def emit_bool(target, s, default_marker=None, + default=None): # pragma: no cover + if target is default_marker: + target = default + elif target: + target = s + else: + target = None + + + at template +def emit_convert( + target, encoded=byte_string, str=unicode_string, + long=long, type=type, + default_marker=None, default=None): # pragma: no cover + if target is None: + pass + elif target is default_marker: + target = default + else: + __tt = type(target) + + if __tt is int or __tt is float or __tt is long: + target = str(target) + elif __tt is encoded: + target = decode(target) + elif __tt is not str: + try: + target = target.__html__ + except AttributeError: + __converted = convert(target) + target = str(target) if target is __converted else __converted + else: + target = target() + + + at template +def emit_translate(target, msgid, default=None): # pragma: no cover + target = translate(msgid, default=default, domain=__i18n_domain) + + + at template +def emit_convert_and_escape( + target, quote=None, quote_entity=None, str=unicode_string, long=long, + type=type, encoded=byte_string, + default_marker=None, default=None): # pragma: no cover + if target is None: + pass + elif target is default_marker: + target = default + else: + __tt = type(target) + + if __tt is int or __tt is float or __tt is long: + target = str(target) + else: + try: + if __tt is encoded: + target = decode(target) + elif __tt is not str: + try: + target = target.__html__ + except: + __converted = convert(target) + target = str(target) if target is __converted \ + else __converted + else: + raise RuntimeError + except RuntimeError: + target = target() + else: + if target is not None: + try: + escape = __re_needs_escape(target) is not None + except TypeError: + pass + else: + if escape: + # Character escape + if '&' in target: + target 
= target.replace('&', '&') + if '<' in target: + target = target.replace('<', '<') + if '>' in target: + target = target.replace('>', '>') + if quote is not None and quote in target: + target = target.replace(quote, quote_entity) + + +class Interpolator(object): + braces_required_regex = re.compile( + r'(?<!\\)\$({(?P<expression>.*)})', + re.DOTALL) + + braces_optional_regex = re.compile( + r'(?<!\\)\$({(?P<expression>.*)}|(?P<variable>[A-Za-z][A-Za-z0-9_]*))', + re.DOTALL) + + def __init__(self, expression, braces_required, translate=False): + self.expression = expression + self.regex = self.braces_required_regex if braces_required else \ + self.braces_optional_regex + self.translate = translate + + def __call__(self, name, engine): + """The strategy is to find possible expression strings and + call the ``validate`` function of the parser to validate. + + For every possible starting point, the longest possible + expression is tried first, then the second longest and so + forth. + + Example 1: + + ${'expressions use the ${<expression>} format'} + + The entire expression is attempted first and it is also the + only one that validates. + + Example 2: + + ${'Hello'} ${'world!'} + + Validation of the longest possible expression (the entire + string) will fail, while the second round of attempts, + ``${'Hello'}`` and ``${'world!'}`` respectively, validate. + + """ + + body = [] + nodes = [] + text = self.expression + + expr_map = {} + translate = self.translate + + while text: + matched = text + m = self.regex.search(matched) + if m is None: + nodes.append(ast.Str(s=text)) + break + + part = text[:m.start()] + text = text[m.start():] + + if part: + node = ast.Str(s=part) + nodes.append(node) + + if not body: + target = name + else: + target = store("%s_%d" % (name.id, text.pos)) + + while True: + d = groupdict(m, matched) + string = d["expression"] or d["variable"] or "" + + string = decode_htmlentities(string) + + try: + compiler = engine.parse(string) + body += compiler.assign_text(target) + except ExpressionError: + matched = matched[m.start():m.end() - 1] + m = self.regex.search(matched) + if m is None: + raise + else: + break + + # If one or more expressions are not simple names, we + # disable translation. 
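
The docstring above describes the interpolation strategy: for every
possible starting point, the longest candidate ${...} expression is
validated first, and the candidate is shortened until something parses.
A minimal, self-contained sketch of that idea follows; it reuses the
``braces_required_regex`` pattern shown above and uses the built-in
compile() as a stand-in for the engine's validation, so it is purely
illustrative and not the library's code.

    import re

    # Same pattern as Interpolator.braces_required_regex above.
    pattern = re.compile(r'(?<!\\)\$({(?P<expression>.*)})', re.DOTALL)

    def first_valid_expression(text):
        # Longest ${...} candidate first; on a syntax error, drop the
        # trailing brace and search again in the shortened candidate.
        matched = text
        m = pattern.search(matched)
        while m is not None:
            expression = m.group('expression')
            try:
                compile(expression, '<string>', 'eval')
            except SyntaxError:
                matched = matched[m.start():m.end() - 1]
                m = pattern.search(matched)
            else:
                return expression
        raise SyntaxError('no valid expression found')

    # Example 1 from the docstring: the whole string validates.
    first_valid_expression(
        "${'expressions use the ${<expression>} format'}")
    # -> "'expressions use the ${<expression>} format'"

    # Example 2: the longest candidate fails, the shorter one succeeds.
    first_valid_expression("${'Hello'} ${'world!'}")
    # -> "'Hello'"
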
+ if RE_NAME.match(string) is None: + translate = False + + # if this is the first expression, use the provided + # assignment name; otherwise, generate one (here based + # on the string position) + node = load(target.id) + nodes.append(node) + + expr_map[node] = safe_native(string) + + text = text[len(m.group()):] + + if len(nodes) == 1: + target = nodes[0] + + if translate and isinstance(target, ast.Str): + target = template( + "translate(msgid, domain=__i18n_domain, context=econtext)", + msgid=target, mode="eval", + ) + else: + if translate: + formatting_string = "" + keys = [] + values = [] + + for node in nodes: + if isinstance(node, ast.Str): + formatting_string += node.s + else: + string = expr_map[node] + formatting_string += "${%s}" % string + keys.append(ast.Str(s=string)) + values.append(node) + + target = template( + "translate(msgid, mapping=mapping, domain=__i18n_domain, context=econtext)", + msgid=ast.Str(s=formatting_string), + mapping=ast.Dict(keys=keys, values=values), + mode="eval" + ) + else: + nodes = [ + template( + "NODE if NODE is not None else ''", + NODE=node, mode="eval" + ) + for node in nodes + ] + + target = ast.BinOp( + left=ast.Str(s="%s" * len(nodes)), + op=ast.Mod(), + right=ast.Tuple(elts=nodes, ctx=ast.Load())) + + body += [ast.Assign(targets=[name], value=target)] + return body + + +class ExpressionEngine(object): + """Expression engine. + + This test demonstrates how to configure and invoke the engine. + + >>> from chameleon import tales + >>> parser = tales.ExpressionParser({ + ... 'python': tales.PythonExpr, + ... 'not': tales.NotExpr, + ... 'exists': tales.ExistsExpr, + ... 'string': tales.StringExpr, + ... }, 'python') + + >>> engine = ExpressionEngine(parser) + + An expression evaluation function: + + >>> eval = lambda expression: tales.test( + ... tales.IdentityExpr(expression), engine) + + We have provided 'python' as the default expression type. This + means that when no prefix is given, the expression is evaluated as + a Python expression: + + >>> eval('not False') + True + + Note that the ``type`` prefixes bind left. If ``not`` and + ``exits`` are two expression type prefixes, consider the + following:: + + >>> eval('not: exists: int(None)') + True + + The pipe operator binds right. In the following example, but + arguments are evaluated against ``not: exists: ``. + + >>> eval('not: exists: help') + False + + >>> eval('string:test ${1}${2}') + 'test 12' + + """ + + supported_char_escape_set = set(('&', '<', '>')) + + def __init__(self, parser, char_escape=(), + default=None, default_marker=None): + self._parser = parser + self._char_escape = char_escape + self._default = default + self._default_marker = default_marker + + def __call__(self, string, target): + # BBB: This method is deprecated. Instead, a call should first + # be made to ``parse`` and then one of the assignment methods + # ("value" or "text"). 
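
As the comment above notes, the preferred calling convention is to
``parse`` the expression string first and then pick one of the
assignment methods on the returned ExpressionCompiler (defined just
below). A rough sketch, assuming the ``tales`` parser types shown in
the class docstring and ``store`` from chameleon.astutil; the target
name ``__out`` is made up for the example.

    from chameleon import tales
    from chameleon.astutil import store
    from chameleon.compiler import ExpressionEngine

    parser = tales.ExpressionParser({'python': tales.PythonExpr},
                                    'python')
    engine = ExpressionEngine(parser)

    compiler = engine.parse('python: 1 + 1')   # -> ExpressionCompiler
    # AST statements assigning the evaluated value to the target name:
    value_stmts = compiler.assign_value(store('__out'))
    # Or, for text output with the engine's conversion and escaping:
    text_stmts = compiler.assign_text(store('__out'))
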
+ + compiler = self.parse(string) + return compiler(string, target) + + def parse(self, string): + expression = self._parser(string) + compiler = self.get_compiler(expression, string) + return ExpressionCompiler(compiler, self) + + def get_compiler(self, expression, string): + def compiler(target, engine, result_type=None, *args): + stmts = expression(target, engine) + + if result_type is not None: + method = getattr(self, '_convert_%s' % result_type) + steps = method(target, *args) + stmts.extend(steps) + + return [try_except_wrap(stmts, string)] + + return compiler + + def _convert_bool(self, target, s): + """Converts value given by ``target`` to a string ``s`` if the + target is a true value, otherwise ``None``. + """ + + return emit_bool( + target, ast.Str(s=s), + default=self._default, + default_marker=self._default_marker + ) + + def _convert_text(self, target): + """Converts value given by ``target`` to text.""" + + if self._char_escape: + # This is a cop-out - we really only support a very select + # set of escape characters + other = set(self._char_escape) - self.supported_char_escape_set + + if other: + for supported in '"', '\'', '': + if supported in self._char_escape: + quote = supported + break + else: + raise RuntimeError( + "Unsupported escape set: %s." % repr(self._char_escape) + ) + else: + quote = '\0' + + entity = char2entity(quote or '\0') + + return emit_convert_and_escape( + target, + quote=ast.Str(s=quote), + quote_entity=ast.Str(s=entity), + default=self._default, + default_marker=self._default_marker, + ) + + return emit_convert( + target, + default=self._default, + default_marker=self._default_marker, + ) + + +class ExpressionCompiler(object): + def __init__(self, compiler, engine): + self.compiler = compiler + self.engine = engine + + def assign_bool(self, target, s): + return self.compiler(target, self.engine, "bool", s) + + def assign_text(self, target): + return self.compiler(target, self.engine, "text") + + def assign_value(self, target): + return self.compiler(target, self.engine) + + +class ExpressionEvaluator(object): + """Evaluates dynamic expression. + + This is not particularly efficient, but supported for legacy + applications. + + >>> from chameleon import tales + >>> parser = tales.ExpressionParser({'python': tales.PythonExpr}, 'python') + >>> engine = functools.partial(ExpressionEngine, parser) + + >>> evaluate = ExpressionEvaluator(engine, { + ... 'foo': 'bar', + ... }) + + The evaluation function is passed the local and remote context, + the expression type and finally the expression. 
+ + >>> evaluate({'boo': 'baz'}, {}, 'python', 'foo + boo') + 'barbaz' + + The cache is now primed: + + >>> evaluate({'boo': 'baz'}, {}, 'python', 'foo + boo') + 'barbaz' + + Note that the call method supports currying of the expression + argument: + + >>> python = evaluate({'boo': 'baz'}, {}, 'python') + >>> python('foo + boo') + 'barbaz' + + """ + + __slots__ = "_engine", "_cache", "_names", "_builtins" + + def __init__(self, engine, builtins): + self._engine = engine + self._names, self._builtins = zip(*builtins.items()) + self._cache = {} + + def __call__(self, econtext, rcontext, expression_type, string=None): + if string is None: + return functools.partial( + self.__call__, econtext, rcontext, expression_type + ) + + expression = "%s:%s" % (expression_type, string) + + try: + evaluate = self._cache[expression] + except KeyError: + assignment = Assignment(["_result"], expression, True) + module = Module("evaluate", Context(assignment)) + + compiler = Compiler( + self._engine, module, ('econtext', 'rcontext') + self._names + ) + + env = {} + exec(compiler.code, env) + evaluate = self._cache[expression] = env["evaluate"] + + evaluate(econtext, rcontext, *self._builtins) + return econtext['_result'] + + +class NameTransform(object): + """ + >>> nt = NameTransform( + ... set(('foo', 'bar', )), {'boo': 'boz'}, + ... ('econtext', ), + ... ) + + >>> def test(node): + ... rewritten = nt(node) + ... module = ast.Module([ast.fix_missing_locations(rewritten)]) + ... codegen = TemplateCodeGenerator(module) + ... return codegen.code + + Any odd name: + + >>> test(load('frobnitz')) + "getitem('frobnitz')" + + A 'builtin' name will first be looked up via ``get`` allowing fall + back to the global builtin value: + + >>> test(load('foo')) + "get('foo', foo)" + + Internal names (with two leading underscores) are left alone: + + >>> test(load('__internal')) + '__internal' + + Compiler internals or disallowed names: + + >>> test(load('econtext')) + 'econtext' + + Aliased names: + + >>> test(load('boo')) + 'boz' + + """ + + def __init__(self, builtins, aliases, internals): + self.builtins = builtins + self.aliases = aliases + self.internals = internals + + def __call__(self, node): + name = node.id + + # Don't rewrite names that begin with an underscore; they are + # internal and can be assumed to be locally defined. This + # policy really should be part of the template program, not + # defined here in the compiler. + if name.startswith('__') or name in self.internals: + return node + + if isinstance(node.ctx, ast.Store): + return store_econtext(name) + + aliased = self.aliases.get(name) + if aliased is not None: + return load(aliased) + + # If the name is a Python global, first try acquiring it from + # the dynamic context, then fall back to the global. + if name in self.builtins: + return template( + "get(key, name)", + mode="eval", + key=ast.Str(s=name), + name=load(name), + ) + + # Otherwise, simply acquire it from the dynamic context. + return load_econtext(name) + + +class ExpressionTransform(object): + """Internal wrapper to transform expression nodes into assignment + statements. + + The node input may use the provided expression engine, but other + expression node types are supported such as ``Builtin`` which + simply resolves a built-in name. + + Used internally be the compiler. 
+ """ + + loads_symbol = Symbol(pickle.loads) + + def __init__(self, engine_factory, cache, visitor, strict=True): + self.engine_factory = engine_factory + self.cache = cache + self.strict = strict + self.visitor = visitor + + def __call__(self, expression, target): + if isinstance(target, string_type): + target = store(target) + + try: + stmts = self.translate(expression, target) + except ExpressionError: + if self.strict: + raise + + exc = sys.exc_info()[1] + p = pickle.dumps(exc) + + stmts = template( + "__exc = loads(p)", loads=self.loads_symbol, p=ast.Str(s=p) + ) + + token = Token(exc.token, exc.offset, filename=exc.filename) + + stmts += set_error(token, load("__exc")) + stmts += [ast.Raise(exc=load("__exc"))] + + # Apply visitor to each statement + for stmt in stmts: + self.visitor(stmt) + + return stmts + + def translate(self, expression, target): + if isinstance(target, string_type): + target = store(target) + + cached = self.cache.get(expression) + + if cached is not None: + stmts = [ast.Assign(targets=[target], value=cached)] + elif isinstance(expression, ast.expr): + stmts = [ast.Assign(targets=[target], value=expression)] + else: + # The engine interface supports simple strings, which + # default to expression nodes + if isinstance(expression, string_type): + expression = Value(expression, True) + + kind = type(expression).__name__ + visitor = getattr(self, "visit_%s" % kind) + stmts = visitor(expression, target) + + # Add comment + target_id = getattr(target, "id", target) + comment = Comment(" %r -> %s" % (expression, target_id)) + stmts.insert(0, comment) + + return stmts + + def visit_Value(self, node, target): + engine = self.engine_factory() + compiler = engine.parse(node.value) + return compiler.assign_value(target) + + def visit_Default(self, node, target): + value = annotated(node.marker) + return [ast.Assign(targets=[target], value=value)] + + def visit_Substitution(self, node, target): + engine = self.engine_factory( + char_escape=node.char_escape, + default=node.default, + ) + compiler = engine.parse(node.value) + return compiler.assign_text(target) + + def visit_Negate(self, node, target): + return self.translate(node.value, target) + \ + template("TARGET = not TARGET", TARGET=target) + + def visit_Identity(self, node, target): + expression = self.translate(node.expression, "__expression") + value = self.translate(node.value, "__value") + + return expression + value + \ + template("TARGET = __expression is __value", TARGET=target) + + def visit_Equality(self, node, target): + expression = self.translate(node.expression, "__expression") + value = self.translate(node.value, "__value") + + return expression + value + \ + template("TARGET = __expression == __value", TARGET=target) + + def visit_Boolean(self, node, target): + engine = self.engine_factory() + compiler = engine.parse(node.value) + return compiler.assign_bool(target, node.s) + + def visit_Interpolation(self, node, target): + expr = node.value + if isinstance(expr, Substitution): + engine = self.engine_factory( + char_escape=expr.char_escape, + default=expr.default, + ) + elif isinstance(expr, Value): + engine = self.engine_factory() + else: + raise RuntimeError("Bad value: %r." 
% node.value) + + interpolator = Interpolator( + expr.value, node.braces_required, node.translation + ) + + compiler = engine.get_compiler(interpolator, expr.value) + return compiler(target, engine) + + def visit_Translate(self, node, target): + if node.msgid is not None: + msgid = ast.Str(s=node.msgid) + else: + msgid = target + return self.translate(node.node, target) + \ + emit_translate(target, msgid, default=target) + + def visit_Static(self, node, target): + value = annotated(node) + return [ast.Assign(targets=[target], value=value)] + + def visit_Builtin(self, node, target): + value = annotated(node) + return [ast.Assign(targets=[target], value=value)] + + +class Compiler(object): + """Generic compiler class. + + Iterates through nodes and yields Python statements which form a + template program. + """ + + exceptions = NameError, \ + ValueError, \ + AttributeError, \ + LookupError, \ + TypeError + + defaults = { + 'translate': Symbol(fast_translate), + 'decode': Builtin("str"), + 'convert': Builtin("str"), + } + + lock = threading.Lock() + + global_builtins = set(builtins.__dict__) + + def __init__(self, engine_factory, node, builtins={}, strict=True): + self._scopes = [set()] + self._expression_cache = {} + self._translations = [] + self._builtins = builtins + self._aliases = [{}] + self._macros = [] + self._current_slot = [] + + internals = COMPILER_INTERNALS_OR_DISALLOWED | \ + set(self.defaults) + + transform = NameTransform( + self.global_builtins | set(builtins), + ListDictProxy(self._aliases), + internals, + ) + + self._visitor = visitor = NameLookupRewriteVisitor(transform) + + self._engine = ExpressionTransform( + engine_factory, + self._expression_cache, + visitor, + strict=strict, + ) + + if isinstance(node_annotations, dict): + self.lock.acquire() + backup = node_annotations.copy() + else: + backup = None + + try: + module = ast.Module([]) + module.body += self.visit(node) + ast.fix_missing_locations(module) + generator = TemplateCodeGenerator(module) + finally: + if backup is not None: + node_annotations.clear() + node_annotations.update(backup) + self.lock.release() + + self.code = generator.code + + def visit(self, node): + if node is None: + return () + kind = type(node).__name__ + visitor = getattr(self, "visit_%s" % kind) + iterator = visitor(node) + return list(iterator) + + def visit_Sequence(self, node): + for item in node.items: + for stmt in self.visit(item): + yield stmt + + def visit_Element(self, node): + self._aliases.append(self._aliases[-1].copy()) + + for stmt in self.visit(node.start): + yield stmt + + for stmt in self.visit(node.content): + yield stmt + + if node.end is not None: + for stmt in self.visit(node.end): + yield stmt + + self._aliases.pop() + + def visit_Module(self, node): + body = [] + + body += template("import re") + body += template("import functools") + body += template("__marker = object()") + body += template( + r"g_re_amp = re.compile(r'&(?!([A-Za-z]+|#[0-9]+);)')" + ) + body += template( + r"g_re_needs_escape = re.compile(r'[&<>\"\']').search") + + body += template( + r"__re_whitespace = " + r"functools.partial(re.compile('\s+').sub, ' ')", + ) + + # Visit module content + program = self.visit(node.program) + + body += [ast.FunctionDef( + name=node.name, args=ast.arguments( + args=[param(b) for b in self._builtins], + defaults=(), + ), + body=program + )] + + return body + + def visit_MacroProgram(self, node): + functions = [] + + # Visit defined macros + macros = getattr(node, "macros", ()) + names = [] + for macro in macros: + 
stmts = self.visit(macro) + function = stmts[-1] + names.append(function.name) + functions += stmts + + # Return function dictionary + functions += [ast.Return(value=ast.Dict( + keys=[ast.Str(s=name) for name in names], + values=[load(name) for name in names], + ))] + + return functions + + def visit_Context(self, node): + return template("getitem = econtext.__getitem__") + \ + template("get = econtext.get") + \ + self.visit(node.node) + + def visit_Macro(self, node): + body = [] + + # Initialization + body += template("__append = __stream.append") + body += template("__re_amp = g_re_amp") + body += template("__re_needs_escape = g_re_needs_escape") + + # Resolve defaults + for name in self.defaults: + body += template( + "NAME = econtext[KEY]", + NAME=name, KEY=ast.Str(s="__" + name) + ) + + # Internal set of defined slots + self._slots = set() + + # Visit macro body + nodes = itertools.chain(*tuple(map(self.visit, node.body))) + + # Slot resolution + for name in self._slots: + body += template( + "try: NAME = econtext[KEY].pop()\n" + "except: NAME = None", + KEY=ast.Str(s=name), NAME=store(name)) + + # Append visited nodes + body += nodes + + function_name = "render" if node.name is None else \ + "render_%s" % mangle(node.name) + + function = ast.FunctionDef( + name=function_name, args=ast.arguments( + args=[ + param("__stream"), + param("econtext"), + param("rcontext"), + param("__i18n_domain"), + ], + defaults=[load("None")], + ), + body=body + ) + + yield function + + def visit_Text(self, node): + return emit_node(ast.Str(s=node.value)) + + def visit_Domain(self, node): + backup = "__previous_i18n_domain_%d" % id(node) + return template("BACKUP = __i18n_domain", BACKUP=backup) + \ + template("__i18n_domain = NAME", NAME=ast.Str(s=node.name)) + \ + self.visit(node.node) + \ + template("__i18n_domain = BACKUP", BACKUP=backup) + + def visit_OnError(self, node): + body = [] + + fallback = identifier("__fallback") + body += template("fallback = len(__stream)", fallback=fallback) + + self._enter_assignment((node.name, )) + fallback_body = self.visit(node.fallback) + self._leave_assignment((node.name, )) + + error_assignment = template( + "econtext[key] = cls(__exc, rcontext['__error__'][-1][1:3])", + cls=ErrorInfo, + key=ast.Str(s=node.name), + ) + + body += [ast.TryExcept( + body=self.visit(node.node), + handlers=[ast.ExceptHandler( + type=ast.Tuple(elts=[Builtin("Exception")], ctx=ast.Load()), + name=store("__exc"), + body=(error_assignment + \ + template("del __stream[fallback:]", fallback=fallback) + \ + fallback_body + ), + )] + )] + + return body + + def visit_Content(self, node): + name = "__content" + body = self._engine(node.expression, store(name)) + + if node.translate: + body += emit_translate(name, name) + + if node.char_escape: + body += emit_convert_and_escape(name) + else: + body += emit_convert(name) + + body += template("if NAME is not None: __append(NAME)", NAME=name) + + return body + + def visit_Interpolation(self, node): + name = identifier("content") + return self._engine(node, name) + \ + emit_node_if_non_trivial(name) + + def visit_Alias(self, node): + assert len(node.names) == 1 + name = node.names[0] + target = self._aliases[-1][name] = identifier(name, id(node)) + return self._engine(node.expression, target) + + def visit_Assignment(self, node): + for name in node.names: + if name in COMPILER_INTERNALS_OR_DISALLOWED: + raise TranslationError( + "Name disallowed by compiler.", name + ) + + if name.startswith('__'): + raise TranslationError( + "Name disallowed by 
compiler (double underscore).", + name + ) + + assignment = self._engine(node.expression, store("__value")) + + if len(node.names) != 1: + target = ast.Tuple( + elts=[store_econtext(name) for name in node.names], + ctx=ast.Store(), + ) + else: + target = store_econtext(node.names[0]) + + assignment.append(ast.Assign(targets=[target], value=load("__value"))) + + for name in node.names: + if not node.local: + assignment += template( + "rcontext[KEY] = __value", KEY=ast.Str(s=native_string(name)) + ) + + return assignment + + def visit_Define(self, node): + scope = set(self._scopes[-1]) + self._scopes.append(scope) + + for assignment in node.assignments: + if assignment.local: + for stmt in self._enter_assignment(assignment.names): + yield stmt + + for stmt in self.visit(assignment): + yield stmt + + for stmt in self.visit(node.node): + yield stmt + + for assignment in node.assignments: + if assignment.local: + for stmt in self._leave_assignment(assignment.names): + yield stmt + + self._scopes.pop() + + def visit_Omit(self, node): + return self.visit_Condition(node) + + def visit_Condition(self, node): + target = "__condition" + assignment = self._engine(node.expression, target) + + assert assignment + + for stmt in assignment: + yield stmt + + body = self.visit(node.node) or [ast.Pass()] + + orelse = getattr(node, "orelse", None) + if orelse is not None: + orelse = self.visit(orelse) + + test = load(target) + + yield ast.If(test=test, body=body, orelse=orelse) + + def visit_Translate(self, node): + """Translation. + + Visit items and assign output to a default value. + + Finally, compile a translation expression and use either + result or default. + """ + + body = [] + + # Track the blocks of this translation + self._translations.append(set()) + + # Prepare new stream + append = identifier("append", id(node)) + stream = identifier("stream", id(node)) + body += template("s = new_list", s=stream, new_list=LIST) + \ + template("a = s.append", a=append, s=stream) + + # Visit body to generate the message body + code = self.visit(node.node) + swap(ast.Suite(body=code), load(append), "__append") + body += code + + # Reduce white space and assign as message id + msgid = identifier("msgid", id(node)) + body += template( + "msgid = __re_whitespace(''.join(stream)).strip()", + msgid=msgid, stream=stream + ) + + default = msgid + + # Compute translation block mapping if applicable + names = self._translations[-1] + if names: + keys = [] + values = [] + + for name in names: + stream, append = self._get_translation_identifiers(name) + keys.append(ast.Str(s=name)) + values.append(load(stream)) + + # Initialize value + body.insert( + 0, ast.Assign( + targets=[store(stream)], + value=ast.Str(s=native_string("")))) + + mapping = ast.Dict(keys=keys, values=values) + else: + mapping = None + + # if this translation node has a name, use it as the message id + if node.msgid: + msgid = ast.Str(s=node.msgid) + + # emit the translation expression + body += template( + "__append(translate(" + "msgid, mapping=mapping, default=default, domain=__i18n_domain, context=econtext))", + msgid=msgid, default=default, mapping=mapping + ) + + # pop away translation block reference + self._translations.pop() + + return body + + def visit_Start(self, node): + try: + line, column = node.prefix.location + except AttributeError: + line, column = 0, 0 + + yield Comment( + " %s%s ... 
(%d:%d)\n" + " --------------------------------------------------------" % ( + node.prefix, node.name, line, column)) + + if node.attributes: + for stmt in emit_node(ast.Str(s=node.prefix + node.name)): + yield stmt + + for attribute in node.attributes: + for stmt in self.visit(attribute): + yield stmt + + for stmt in emit_node(ast.Str(s=node.suffix)): + yield stmt + else: + for stmt in emit_node( + ast.Str(s=node.prefix + node.name + node.suffix)): + yield stmt + + def visit_End(self, node): + for stmt in emit_node(ast.Str( + s=node.prefix + node.name + node.space + node.suffix)): + yield stmt + + def visit_Attribute(self, node): + f = node.space + node.name + node.eq + node.quote + "%s" + node.quote + + # Static attributes are just outputted directly + if isinstance(node.expression, ast.Str): + s = f % node.expression.s + return template("__append(S)", S=ast.Str(s=s)) + + target = identifier("attr", node.name) + body = self._engine(node.expression, store(target)) + return body + template( + "if TARGET is not None: __append(FORMAT % TARGET)", + FORMAT=ast.Str(s=f), + TARGET=target, + ) + + def visit_Cache(self, node): + body = [] + + for expression in node.expressions: + name = identifier("cache", id(expression)) + target = store(name) + + # Skip re-evaluation + if self._expression_cache.get(expression): + continue + + body += self._engine(expression, target) + self._expression_cache[expression] = target + + body += self.visit(node.node) + + return body + + def visit_UseInternalMacro(self, node): + if node.name is None: + render = "render" + else: + render = "render_%s" % mangle(node.name) + + return template( + "f(__stream, econtext.copy(), rcontext, __i18n_domain)", + f=render) + \ + template("econtext.update(rcontext)") + + def visit_DefineSlot(self, node): + name = "__slot_%s" % mangle(node.name) + body = self.visit(node.node) + + self._slots.add(name) + + orelse = template( + "SLOT(__stream, econtext.copy(), rcontext)", + SLOT=name) + test = ast.Compare( + left=load(name), + ops=[ast.Is()], + comparators=[load("None")] + ) + + return [ + ast.If(test=test, body=body or [ast.Pass()], orelse=orelse) + ] + + def visit_Name(self, node): + """Translation name.""" + + if not self._translations: + raise TranslationError( + "Not allowed outside of translation.", node.name) + + if node.name in self._translations[-1]: + raise TranslationError( + "Duplicate translation name: %s." 
% node.name) + + self._translations[-1].add(node.name) + body = [] + + # prepare new stream + stream, append = self._get_translation_identifiers(node.name) + body += template("s = new_list", s=stream, new_list=LIST) + \ + template("a = s.append", a=append, s=stream) + + # generate code + code = self.visit(node.node) + swap(ast.Suite(body=code), load(append), "__append") + body += code + + # output msgid + text = Text('${%s}' % node.name) + body += self.visit(text) + + # Concatenate stream + body += template("stream = ''.join(stream)", stream=stream) + + return body + + def visit_CodeBlock(self, node): + stmts = template(textwrap.dedent(node.source.strip('\n'))) + + for stmt in stmts: + self._visitor(stmt) + + return stmts + + def visit_UseExternalMacro(self, node): + self._macros.append(node.extend) + + callbacks = [] + for slot in node.slots: + key = "__slot_%s" % mangle(slot.name) + fun = "__fill_%s" % mangle(slot.name) + + self._current_slot.append(slot.name) + + body = template("getitem = econtext.__getitem__") + \ + template("get = econtext.get") + \ + self.visit(slot.node) + + assert self._current_slot.pop() == slot.name + + callbacks.append( + ast.FunctionDef( + name=fun, + args=ast.arguments( + args=[ + param("__stream"), + param("econtext"), + param("rcontext"), + param("__i18n_domain"), + ], + defaults=[load("__i18n_domain")], + ), + body=body or [ast.Pass()], + )) + + key = ast.Str(s=key) + + assignment = template( + "_slots = econtext[KEY] = DEQUE((NAME,))", + KEY=key, NAME=fun, DEQUE=Symbol(collections.deque), + ) + + if node.extend: + append = template("_slots.appendleft(NAME)", NAME=fun) + + assignment = [ast.TryExcept( + body=template("_slots = getitem(KEY)", KEY=key), + handlers=[ast.ExceptHandler(body=assignment)], + orelse=append, + )] + + callbacks.extend(assignment) + + assert self._macros.pop() == node.extend + + assignment = self._engine(node.expression, store("__macro")) + + return ( + callbacks + \ + assignment + \ + template( + "__macro.include(__stream, econtext.copy(), " \ + "rcontext, __i18n_domain)") + \ + template("econtext.update(rcontext)") + ) + + def visit_Repeat(self, node): + # Used for loop variable definition and restore + self._scopes.append(set()) + + # Variable assignment and repeat key for single- and + # multi-variable repeat clause + if node.local: + contexts = "econtext", + else: + contexts = "econtext", "rcontext" + + for name in node.names: + if name in COMPILER_INTERNALS_OR_DISALLOWED: + raise TranslationError( + "Name disallowed by compiler.", name + ) + + if len(node.names) > 1: + targets = [ + ast.Tuple(elts=[ + subscript(native_string(name), load(context), ast.Store()) + for name in node.names], ctx=ast.Store()) + for context in contexts + ] + + key = ast.Tuple( + elts=[ast.Str(s=name) for name in node.names], + ctx=ast.Load()) + else: + name = node.names[0] + targets = [ + subscript(native_string(name), load(context), ast.Store()) + for context in contexts + ] + + key = ast.Str(s=node.names[0]) + + index = identifier("__index", id(node)) + assignment = [ast.Assign(targets=targets, value=load("__item"))] + + # Make repeat assignment in outer loop + names = node.names + local = node.local + + outer = self._engine(node.expression, store("__iterator")) + + if local: + outer[:] = list(self._enter_assignment(names)) + outer + + outer += template( + "__iterator, INDEX = getitem('repeat')(key, __iterator)", + key=key, INDEX=index + ) + + # Set a trivial default value for each name assigned to make + # sure we assign a value even if the 
iteration is empty + outer += [ast.Assign( + targets=[store_econtext(name) + for name in node.names], + value=load("None")) + ] + + # Compute inner body + inner = self.visit(node.node) + + # After each iteration, decrease the index + inner += template("index -= 1", index=index) + + # For items up to N - 1, emit repeat whitespace + inner += template( + "if INDEX > 0: __append(WHITESPACE)", + INDEX=index, WHITESPACE=ast.Str(s=node.whitespace) + ) + + # Main repeat loop + outer += [ast.For( + target=store("__item"), + iter=load("__iterator"), + body=assignment + inner, + )] + + # Finally, clean up assignment if it's local + if outer: + outer += self._leave_assignment(names) + + self._scopes.pop() + + return outer + + def _get_translation_identifiers(self, name): + assert self._translations + prefix = id(self._translations[-1]) + stream = identifier("stream_%d" % prefix, name) + append = identifier("append_%d" % prefix, name) + return stream, append + + def _enter_assignment(self, names): + for name in names: + for stmt in template( + "BACKUP = get(KEY, __marker)", + BACKUP=identifier("backup_%s" % name, id(names)), + KEY=ast.Str(s=native_string(name)), + ): + yield stmt + + def _leave_assignment(self, names): + for name in names: + for stmt in template( + "if BACKUP is __marker: del econtext[KEY]\n" + "else: econtext[KEY] = BACKUP", + BACKUP=identifier("backup_%s" % name, id(names)), + KEY=ast.Str(s=native_string(name)), + ): + yield stmt diff --git a/lib3/Chameleon-2.9.2/src/chameleon/config.py b/lib3/Chameleon-2.9.2/src/chameleon/config.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/config.py @@ -0,0 +1,47 @@ +import os +import logging + +log = logging.getLogger('chameleon.config') + +# Define which values are read as true +TRUE = ('y', 'yes', 't', 'true', 'on', '1') + +# If eager parsing is enabled, templates are parsed upon +# instantiation, rather than when first called upon; this mode is +# useful for verifying validity of templates across a project +EAGER_PARSING = os.environ.pop('CHAMELEON_EAGER', 'false') +EAGER_PARSING = EAGER_PARSING.lower() in TRUE + +# Debug mode is mostly useful for debugging the template engine +# itself. When enabled, generated source code is written to disk to +# ease step-debugging and some log levels are lowered to increase +# output. Also, the generated source code is available in the +# ``source`` attribute of the template instance if compilation +# succeeded. +DEBUG_MODE = os.environ.pop('CHAMELEON_DEBUG', 'false') +DEBUG_MODE = DEBUG_MODE.lower() in TRUE + +# If a cache directory is specified, template source code will be +# persisted on disk and reloaded between sessions +path = os.environ.pop('CHAMELEON_CACHE', None) +if path is not None: + CACHE_DIRECTORY = os.path.abspath(path) + if not os.path.exists(CACHE_DIRECTORY): + raise ValueError( + "Cache directory does not exist: %s." % CACHE_DIRECTORY + ) + log.info("directory cache: %s." % CACHE_DIRECTORY) +else: + CACHE_DIRECTORY = None + +# When auto-reload is enabled, templates are reloaded on file change. +AUTO_RELOAD = os.environ.pop('CHAMELEON_RELOAD', 'false') +AUTO_RELOAD = AUTO_RELOAD.lower() in TRUE + +for key in os.environ: + if key.lower().startswith('chameleon'): + log.warn("unknown environment variable set: \"%s\"." 
% key) + +# This is the slice length of the expression displayed in the +# formatted exception string +SOURCE_EXPRESSION_MARKER_LENGTH = 60 diff --git a/lib3/Chameleon-2.9.2/src/chameleon/exc.py b/lib3/Chameleon-2.9.2/src/chameleon/exc.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/exc.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- + +import traceback + +from .utils import format_kwargs +from .utils import safe_native +from .tokenize import Token +from .config import SOURCE_EXPRESSION_MARKER_LENGTH as LENGTH + + +def compute_source_marker(line, column, expression, size): + """Computes source marker location string. + + >>> def test(l, c, e, s): + ... s, marker = compute_source_marker(l, c, e, s) + ... out = s + '\\n' + marker + ... + ... # Replace dot with middle-dot to work around doctest ellipsis + ... print(out.replace('...', '···')) + + >>> test('foo bar', 4, 'bar', 7) + foo bar + ^^^ + + >>> test('foo ${bar}', 4, 'bar', 10) + foo ${bar} + ^^^ + + >>> test(' foo bar', 6, 'bar', 6) + ··· oo bar + ^^^ + + >>> test(' foo bar baz ', 6, 'bar', 6) + ··· o bar ··· + ^^^ + + The entire expression is always shown, even if ``size`` does not + accommodate for it. + + >>> test(' foo bar baz ', 6, 'bar baz', 10) + ··· oo bar baz + ^^^^^^^ + + >>> test(' foo bar', 10, 'bar', 5) + ··· o bar + ^^^ + + >>> test(' foo bar', 10, 'boo', 5) + ··· o bar + ^ + + """ + + s = line.lstrip() + column -= len(line) - len(s) + s = s.rstrip() + + try: + i = s[column:].index(expression) + except ValueError: + # If we can't find the expression + # (this shouldn't happen), simply + # use a standard size marker + marker = "^" + else: + column += i + marker = "^" * len(expression) + + if len(expression) > size: + offset = column + size = len(expression) + else: + window = (size - len(expression)) / 2.0 + offset = column - window + offset -= min(3, max(0, column + window + len(expression) - len(s))) + offset = int(offset) + + if offset > 0: + s = s[offset:] + r = s.lstrip() + d = len(s) - len(r) + s = "... " + r + column += 4 - d + column -= offset + + # This also adds to the displayed length + size += 4 + + if len(s) > size: + s = s[:size].rstrip() + " ..." + + return s, column * " " + marker + + +def ellipsify(string, limit): + if len(string) > limit: + return "... " + string[-(limit - 4):] + + +def reconstruct_exc(cls, state): + exc = Exception.__new__(cls) + exc.__dict__ = state + return exc + + +class TemplateError(Exception): + """An error raised by Chameleon. 
+ + >>> from chameleon.tokenize import Token + >>> token = Token('token') + >>> message = 'message' + + Make sure the exceptions can be copied: + + >>> from copy import copy + >>> copy(TemplateError(message, token)) + TemplateError('message', 'token') + + And pickle/unpickled: + + >>> from pickle import dumps, loads + >>> loads(dumps(TemplateError(message, token))) + TemplateError('message', 'token') + + """ + + def __init__(self, msg, token): + if not isinstance(token, Token): + token = Token(token, 0) + + self.msg = msg + self.token = safe_native(token) + self.offset = getattr(token, "pos", 0) + self.filename = token.filename + + def __copy__(self): + inst = Exception.__new__(type(self)) + inst.__dict__ = self.__dict__.copy() + return inst + + def __reduce__(self): + return reconstruct_exc, (type(self), self.__dict__) + + def __str__(self): + text = "%s\n\n" % self.msg + text += " - String: \"%s\"" % self.token + + if self.filename: + text += "\n" + text += " - Filename: %s" % self.filename + + try: + line, column = self.token.location + except AttributeError: + pass + else: + text += "\n" + text += " - Location: (%d:%d)" % (line, column) + + return text + + def __repr__(self): + try: + return "%s('%s', '%s')" % ( + self.__class__.__name__, self.msg, self.token + ) + except AttributeError: + return object.__repr__(self) + + +class ParseError(TemplateError): + """An error occurred during parsing. + + Indicates an error on the structural level. + """ + + +class CompilationError(TemplateError): + """An error occurred during compilation. + + Indicates a general compilation error. + """ + + +class TranslationError(TemplateError): + """An error occurred during translation. + + Indicates a general translation error. + """ + + +class LanguageError(CompilationError): + """Language syntax error. + + Indicates a syntactical error due to incorrect usage of the + template language. + """ + + +class ExpressionError(LanguageError): + """An error occurred compiling an expression. + + Indicates a syntactical error in an expression. 
+ """ + + +class ExceptionFormatter(object): + def __init__(self, errors, econtext, rcontext): + kwargs = rcontext.copy() + kwargs.update(econtext) + + for name in tuple(kwargs): + if name.startswith('__'): + del kwargs[name] + + self._errors = errors + self._kwargs = kwargs + + def __call__(self): + # Format keyword arguments; consecutive arguments are indented + # for readability + try: + formatted = format_kwargs(self._kwargs) + except: + # the ``pprint.pformat`` method calls the representation + # method of the arguments; this may fail and since we're + # already in an exception handler, there's no point in + # pursuing this further + formatted = () + + for index, string in enumerate(formatted[1:]): + formatted[index + 1] = " " * 15 + string + + out = [] + seen = set() + + for error in reversed(self._errors): + expression, line, column, filename, exc = error + + if exc in seen: + continue + + seen.add(exc) + + if isinstance(exc, UnicodeDecodeError): + string = safe_native(exc.object) + + s, marker = compute_source_marker( + string, exc.start, string[exc.start:exc.end], LENGTH + ) + + out.append(" - Stream: %s" % s) + out.append(" %s" % marker) + + _filename = ellipsify(filename, 60) if filename else "<string>" + + out.append(" - Expression: \"%s\"" % expression) + out.append(" - Filename: %s" % _filename) + out.append(" - Location: (%d:%d)" % (line, column)) + + if filename and line and column: + try: + f = open(filename, 'r') + except IOError: + pass + else: + try: + # Pick out source line and format marker + for i, l in enumerate(f): + if i + 1 == line: + s, marker = compute_source_marker( + l, column, expression, LENGTH + ) + + out.append(" - Source: %s" % s) + out.append(" %s" % marker) + break + finally: + f.close() + + out.append(" - Arguments: %s" % "\n".join(formatted)) + + formatted = traceback.format_exception_only(type(exc), exc)[-1] + formatted_class = "%s:" % type(exc).__name__ + + if formatted.startswith(formatted_class): + formatted = formatted[len(formatted_class):].lstrip() + + return "\n".join(map(safe_native, [formatted] + out)) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/i18n.py b/lib3/Chameleon-2.9.2/src/chameleon/i18n.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/i18n.py @@ -0,0 +1,120 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +import re + +from .exc import CompilationError +from .utils import unicode_string + +NAME_RE = r"[a-zA-Z][-a-zA-Z0-9_]*" + +WHITELIST = frozenset([ + "translate", + "domain", + "target", + "source", + "attributes", + "data", + "name", + "mode", + "xmlns", + "xml" + ]) + +_interp_regex = re.compile(r'(?<!\$)(\$(?:(%(n)s)|{(%(n)s)}))' + % ({'n': NAME_RE})) + + +try: # pragma: no cover + str = unicode +except NameError: + pass + +try: # pragma: no cover + # optional: `zope.i18n`, `zope.i18nmessageid` + from zope.i18n import interpolate + from zope.i18n import translate + from zope.i18nmessageid import Message +except ImportError: # pragma: no cover + + def fast_translate(msgid, domain=None, mapping=None, context=None, + target_language=None, default=None): + if default is None: + return msgid + + if mapping: + def replace(match): + whole, param1, param2 = match.groups() + return unicode_string(mapping.get(param1 or param2, whole)) + return _interp_regex.sub(replace, default) + + return default +else: # pragma: no cover + def fast_translate(msgid, domain=None, mapping=None, context=None, + target_language=None, default=None): + if msgid is None: + return + + if target_language is not None or context is not None: + result = translate( + msgid, domain=domain, mapping=mapping, context=context, + target_language=target_language, default=default) + if result != msgid: + return result + + if isinstance(msgid, Message): + default = msgid.default + mapping = msgid.mapping + + if default is None: + default = str(msgid) + + if not isinstance(default, basestring): + return default + + return interpolate(default, mapping) + + +def parse_attributes(attrs, xml=True): + d = {} + + # filter out empty items, eg: + # i18n:attributes="value msgid; name msgid2;" + # would result in 3 items where the last one is empty + attrs = [spec for spec in attrs.split(";") if spec] + + for spec in attrs: + if ',' in spec: + raise CompilationError( + "Attribute must not contain comma. Use semicolon to " + "list multiple attributes", spec + ) + parts = spec.split() + if len(parts) == 2: + attr, msgid = parts + elif len(parts) == 1: + attr = parts[0] + msgid = None + else: + raise CompilationError( + "Illegal i18n:attributes specification.", spec) + if not xml: + attr = attr.lower() + attr = attr.strip() + if attr in d: + raise CompilationError( + "Attribute may only be specified once in i18n:attributes", attr) + d[attr] = msgid + + return d diff --git a/lib3/Chameleon-2.9.2/src/chameleon/interfaces.py b/lib3/Chameleon-2.9.2/src/chameleon/interfaces.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/interfaces.py @@ -0,0 +1,102 @@ +from zope.interface import Interface +from zope.interface import Attribute + + +class ITALExpressionErrorInfo(Interface): + + type = Attribute("type", + "The exception class.") + + value = Attribute("value", + "The exception instance.") + + lineno = Attribute("lineno", + "The line number the error occurred on in the source.") + + offset = Attribute("offset", + "The character offset at which the error occurred.") + + +class ITALIterator(Interface): # pragma: no cover + """A TAL iterator + + Not to be confused with a Python iterator. + """ + + def next(): + """Advance to the next value in the iteration, if possible + + Return a true value if it was possible to advance and return + a false value otherwise. 
+ """ + + +class ITALESIterator(ITALIterator): # pragma: no cover + """TAL Iterator provided by TALES + + Values of this iterator are assigned to items in the repeat namespace. + + For example, with a TAL statement like: tal:repeat="item items", + an iterator will be assigned to "repeat/item". The iterator + provides a number of handy methods useful in writing TAL loops. + + The results are undefined of calling any of the methods except + 'length' before the first iteration. + """ + + def index(): + """Return the position (starting with "0") within the iteration + """ + + def number(): + """Return the position (starting with "1") within the iteration + """ + + def even(): + """Return whether the current position is even + """ + + def odd(): + """Return whether the current position is odd + """ + + def parity(): + """Return 'odd' or 'even' depending on the position's parity + + Useful for assigning CSS class names to table rows. + """ + + def start(): + """Return whether the current position is the first position + """ + + def end(): + """Return whether the current position is the last position + """ + + def letter(): + """Return the position (starting with "a") within the iteration + """ + + def Letter(): + """Return the position (starting with "A") within the iteration + """ + + def roman(): + """Return the position (starting with "i") within the iteration + """ + + def Roman(): + """Return the position (starting with "I") within the iteration + """ + + def item(): + """Return the item at the current position + """ + + def length(): + """Return the length of the sequence + + Note that this may fail if the TAL iterator was created on a Python + iterator. + """ diff --git a/lib3/Chameleon-2.9.2/src/chameleon/loader.py b/lib3/Chameleon-2.9.2/src/chameleon/loader.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/loader.py @@ -0,0 +1,174 @@ +import functools +import imp +import logging +import os +import py_compile +import shutil +import sys +import tempfile +import warnings + +import pkg_resources + +log = logging.getLogger('chameleon.loader') + +try: + str = unicode +except NameError: + basestring = str + + +def cache(func): + def load(self, *args, **kwargs): + template = self.registry.get(args) + if template is None: + self.registry[args] = template = func(self, *args, **kwargs) + return template + return load + + +def abspath_from_asset_spec(spec): + pname, filename = spec.split(':', 1) + return pkg_resources.resource_filename(pname, filename) + +if os.name == "nt": + def abspath_from_asset_spec(spec, f=abspath_from_asset_spec): + if spec[1] == ":": + return spec + return f(spec) + + +class TemplateLoader(object): + """Template loader class. + + To load templates using relative filenames, pass a sequence of + paths (or a single path) as ``search_path``. + + To apply a default filename extension to inputs which do not have + an extension already (i.e. no dot), provide this as + ``default_extension`` (e.g. ``'.pt'``). + + Additional keyword-arguments will be passed on to the template + constructor. 
+ """ + + default_extension = None + + def __init__(self, search_path=None, default_extension=None, **kwargs): + if search_path is None: + search_path = [] + if isinstance(search_path, basestring): + search_path = [search_path] + if default_extension is not None: + self.default_extension = ".%s" % default_extension.lstrip('.') + self.search_path = search_path + self.registry = {} + self.kwargs = kwargs + + @cache + def load(self, spec, cls=None): + if cls is None: + raise ValueError("Unbound template loader.") + + spec = spec.strip() + + if self.default_extension is not None and '.' not in spec: + spec += self.default_extension + + if ':' in spec: + spec = abspath_from_asset_spec(spec) + + if os.path.isabs(spec): + return cls(spec, **self.kwargs) + + for path in self.search_path: + path = os.path.join(path, spec) + if os.path.exists(path): + return cls(path, **self.kwargs) + + raise ValueError("Template not found: %s." % spec) + + def bind(self, cls): + return functools.partial(self.load, cls=cls) + + +class MemoryLoader(object): + def build(self, source, filename): + code = compile(source, filename, 'exec') + env = {} + exec(code, env) + return env + + def get(self, name): + return None + + +class ModuleLoader(object): + def __init__(self, path, remove=False): + self.path = path + self.remove = remove + + def __del__(self, shutil=shutil): + if not self.remove: + return + try: + shutil.rmtree(self.path) + except: + warnings.warn("Could not clean up temporary file path: %s" % (self.path,)) + + def get(self, filename): + path = os.path.join(self.path, filename) + if os.path.exists(path): + log.debug("loading module from cache: %s." % filename) + base, ext = os.path.splitext(filename) + return self._load(base, path) + else: + log.debug('cache miss: %s' % filename) + + def build(self, source, filename): + imp.acquire_lock() + try: + d = self.get(filename) + if d is not None: + return d + + base, ext = os.path.splitext(filename) + name = os.path.join(self.path, base + ".py") + + log.debug("writing source to disk (%d bytes)." % len(source)) + fd, fn = tempfile.mkstemp(prefix=base, suffix='.tmp', dir=self.path) + temp = os.fdopen(fd, 'w') + + try: + try: + temp.write("%s\n" % '# -*- coding: utf-8 -*-') + temp.write(source) + finally: + temp.close() + except: + os.remove(fn) + raise + + os.rename(fn, name) + log.debug("compiling %s into byte-code..." % filename) + py_compile.compile(name) + + return self._load(base, name) + finally: + imp.release_lock() + + def _load(self, base, filename): + imp.acquire_lock() + try: + module = sys.modules.get(base) + if module is None: + f = open(filename, 'rb') + try: + assert base not in sys.modules + module = imp.load_source(base, filename, f) + finally: + f.close() + finally: + imp.release_lock() + + return module.__dict__ diff --git a/lib3/Chameleon-2.9.2/src/chameleon/metal.py b/lib3/Chameleon-2.9.2/src/chameleon/metal.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/metal.py @@ -0,0 +1,23 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +WHITELIST = frozenset([ + "define-macro", + "extend-macro", + "use-macro", + "define-slot", + "fill-slot", + "xmlns", + "xml" + ]) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/namespaces.py b/lib3/Chameleon-2.9.2/src/chameleon/namespaces.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/namespaces.py @@ -0,0 +1,9 @@ +XML_NS = "http://www.w3.org/XML/1998/namespace" +XMLNS_NS = "http://www.w3.org/2000/xmlns/" +XHTML_NS = "http://www.w3.org/1999/xhtml" +TAL_NS = "http://xml.zope.org/namespaces/tal" +META_NS = "http://xml.zope.org/namespaces/meta" +METAL_NS = "http://xml.zope.org/namespaces/metal" +XI_NS = "http://www.w3.org/2001/XInclude" +I18N_NS = "http://xml.zope.org/namespaces/i18n" +PY_NS = "http://genshi.edgewall.org/" diff --git a/lib3/Chameleon-2.9.2/src/chameleon/nodes.py b/lib3/Chameleon-2.9.2/src/chameleon/nodes.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/nodes.py @@ -0,0 +1,210 @@ +from .astutil import Node + + +class UseExternalMacro(Node): + """Extend external macro.""" + + _fields = "expression", "slots", "extend" + + +class Sequence(Node): + """Element sequence.""" + + _fields = "items", + + +class Content(Node): + """Content substitution.""" + + _fields = "expression", "char_escape", "translate" + + +class Default(Node): + """Represents a default value.""" + + _fields = "marker", + + +class CodeBlock(Node): + _fields = "source", + + +class Value(Node): + """Expression object value.""" + + _fields = "value", + + def __repr__(self): + try: + line, column = self.value.location + except AttributeError: + line, column = 0, 0 + + return "<%s %r (%d:%d)>" % ( + type(self).__name__, self.value, line, column + ) + + +class Substitution(Value): + """Expression value for text substitution.""" + + _fields = "value", "char_escape", "default" + + default = None + + +class Boolean(Value): + _fields = "value", "s" + + +class Negate(Node): + """Wraps an expression with a negation.""" + + _fields = "value", + + +class Element(Node): + """XML element.""" + + _fields = "start", "end", "content" + + +class Attribute(Node): + """Element attribute.""" + + _fields = "name", "expression", "quote", "eq", "space" + + +class Start(Node): + """Start-tag.""" + + _fields = "name", "prefix", "suffix", "attributes" + + +class End(Node): + """End-tag.""" + + _fields = "name", "space", "prefix", "suffix" + + +class Condition(Node): + """Node visited only if some condition holds.""" + + _fields = "expression", "node", "orelse" + + +class Identity(Node): + """Condition expression that is true on identity.""" + + _fields = "expression", "value" + + +class Equality(Node): + """Condition expression that is true on equality.""" + + _fields = "expression", "value" + + +class Cache(Node): + """Cache (evaluate only once) the value of ``expression`` inside + ``node``. + """ + + _fields = "expressions", "node" + + +class Assignment(Node): + """Variable assignment.""" + + _fields = "names", "expression", "local" + + +class Alias(Assignment): + """Alias assignment. + + Note that ``expression`` should be a cached or global value. 
+ """ + + local = False + + +class Define(Node): + """Variable definition in scope.""" + + _fields = "assignments", "node" + + +class Repeat(Assignment): + """Iterate over provided assignment and repeat body.""" + + _fields = "names", "expression", "local", "whitespace", "node" + + +class Macro(Node): + """Macro definition.""" + + _fields = "name", "body" + + +class Program(Node): + _fields = "name", "body" + + +class Module(Node): + _fields = "name", "program", + + +class Context(Node): + _fields = "node", + + +class Text(Node): + """Static text output.""" + + _fields = "value", + + +class Interpolation(Text): + """String interpolation output.""" + + _fields = "value", "braces_required", "translation" + + +class Translate(Node): + """Translate node.""" + + _fields = "msgid", "node" + + +class Name(Node): + """Translation name.""" + + _fields = "name", "node" + + +class Domain(Node): + """Update translation domain.""" + + _fields = "name", "node" + + +class OnError(Node): + _fields = "fallback", "name", "node" + + +class UseInternalMacro(Node): + """Use internal macro (defined inside same program).""" + + _fields = "name", + + +class FillSlot(Node): + """Fill a macro slot.""" + + _fields = "name", "node" + + +class DefineSlot(Node): + """Define a macro slot.""" + + _fields = "name", "node" diff --git a/lib3/Chameleon-2.9.2/src/chameleon/parser.py b/lib3/Chameleon-2.9.2/src/chameleon/parser.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/parser.py @@ -0,0 +1,238 @@ +import re +import logging + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + +from .exc import ParseError +from .namespaces import XML_NS +from .tokenize import Token + +match_tag_prefix_and_name = re.compile( + r'^(?P<prefix></?)(?P<name>([^:\n ]+:)?[^ \n\t>/]+)' + '(?P<suffix>(?P<space>\s*)/?>)?', + re.UNICODE | re.DOTALL) +match_single_attribute = re.compile( + r'(?P<space>\s+)(?!\d)' + r'(?P<name>[^ =/>\n\t]+)' + r'((?P<eq>\s*=\s*)' + r'((?P<quote>[\'"])(?P<value>.*?)(?P=quote)|' + r'(?P<alt_value>[^\s\'">/]+))|' + r'(?P<simple_value>(?![ \\n\\t\\r]*=)))', + re.UNICODE | re.DOTALL) +match_comment = re.compile( + r'^<!--(?P<text>.*)-->$', re.DOTALL) +match_cdata = re.compile( + r'^<!\[CDATA\[(?P<text>.*)\]>$', re.DOTALL) +match_declaration = re.compile( + r'^<!(?P<text>[^>]+)>$', re.DOTALL) +match_processing_instruction = re.compile( + r'^<\?(?P<name>\w+)(?P<text>.*?)\?>', re.DOTALL) +match_xml_declaration = re.compile(r'^<\?xml(?=[ /])', re.DOTALL) + +log = logging.getLogger('chameleon.parser') + + +def substitute(regex, repl, token): + if not isinstance(token, Token): + token = Token(token) + + return Token( + regex.sub(repl, token), + token.pos, + token.source, + token.filename + ) + + +def groups(m, token): + result = [] + for i, group in enumerate(m.groups()): + if group is not None: + j, k = m.span(i + 1) + group = token[j:k] + + result.append(group) + + return tuple(result) + + +def groupdict(m, token): + d = m.groupdict() + for name, value in d.items(): + if value is not None: + i, j = m.span(name) + d[name] = token[i:j] + + return d + + +def match_tag(token, regex=match_tag_prefix_and_name): + m = regex.match(token) + d = groupdict(m, token) + + end = m.end() + token = token[end:] + + attrs = d['attrs'] = [] + for m in match_single_attribute.finditer(token): + attr = groupdict(m, token) + alt_value = attr.pop('alt_value', None) + if alt_value is not None: + attr['value'] = alt_value + attr['quote'] = '' + simple_value = 
attr.pop('simple_value', None) + if simple_value is not None: + attr['quote'] = '' + attr['value'] = '' + attr['eq'] = '' + attrs.append(attr) + d['suffix'] = token[m.end():] + + return d + + +def parse_tag(token, namespace): + node = match_tag(token) + + update_namespace(node['attrs'], namespace) + + if ':' in node['name']: + prefix = node['name'].split(':')[0] + else: + prefix = None + + default = node['namespace'] = namespace.get(prefix, XML_NS) + + node['ns_attrs'] = unpack_attributes( + node['attrs'], namespace, default) + + return node + + +def update_namespace(attributes, namespace): + # possibly update namespaces; we do this in a separate step + # because this assignment is irrespective of order + for attribute in attributes: + name = attribute['name'] + value = attribute['value'] + if name == 'xmlns': + namespace[None] = value + elif name.startswith('xmlns:'): + namespace[name[6:]] = value + + +def unpack_attributes(attributes, namespace, default): + namespaced = OrderedDict() + + for index, attribute in enumerate(attributes): + name = attribute['name'] + value = attribute['value'] + + if ':' in name: + prefix = name.split(':')[0] + name = name[len(prefix) + 1:] + try: + ns = namespace[prefix] + except KeyError: + raise KeyError( + "Undefined namespace prefix: %s." % prefix) + else: + ns = default + namespaced[ns, name] = value + + return namespaced + + +def identify(string): + if string.startswith("<"): + if string.startswith("<!--"): + return "comment" + if string.startswith("<![CDATA["): + return "cdata" + if string.startswith("<!"): + return "declaration" + if string.startswith("<?xml"): + return "xml_declaration" + if string.startswith("<?"): + return "processing_instruction" + if string.startswith("</"): + return "end_tag" + if string.endswith("/>"): + return "empty_tag" + if string.endswith(">"): + return "start_tag" + return "error" + return "text" + + +class ElementParser(object): + """Parses tokens into elements.""" + + def __init__(self, stream, default_namespaces): + self.stream = stream + self.queue = [] + self.index = [] + self.namespaces = [default_namespaces.copy()] + + def __iter__(self): + for token in self.stream: + item = self.parse(token) + self.queue.append(item) + + return iter(self.queue) + + def parse(self, token): + kind = identify(token) + visitor = getattr(self, "visit_%s" % kind, self.visit_default) + return visitor(kind, token) + + def visit_comment(self, kind, token): + return "comment", (token, ) + + def visit_cdata(self, kind, token): + return "cdata", (token, ) + + def visit_default(self, kind, token): + return "default", (token, ) + + def visit_processing_instruction(self, kind, token): + m = match_processing_instruction.match(token) + return "processing_instruction", (groupdict(m, token), ) + + def visit_text(self, kind, token): + return kind, (token, ) + + def visit_start_tag(self, kind, token): + namespace = self.namespaces[-1].copy() + self.namespaces.append(namespace) + node = parse_tag(token, namespace) + self.index.append((node['name'], len(self.queue))) + return kind, (node, ) + + def visit_end_tag(self, kind, token): + try: + namespace = self.namespaces.pop() + except IndexError: + raise ParseError("Unexpected end tag.", token) + + node = parse_tag(token, namespace) + + while self.index: + name, pos = self.index.pop() + if name == node['name']: + start, = self.queue.pop(pos)[1] + children = self.queue[pos:] + del self.queue[pos:] + break + else: + raise ParseError("Unexpected end tag.", token) + + return "element", (start, node, 
children) + + def visit_empty_tag(self, kind, token): + namespace = self.namespaces[-1].copy() + node = parse_tag(token, namespace) + return "element", (node, None, []) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/program.py b/lib3/Chameleon-2.9.2/src/chameleon/program.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/program.py @@ -0,0 +1,38 @@ +try: + str = unicode +except NameError: + long = int + +from .tokenize import iter_xml +from .tokenize import iter_text +from .parser import ElementParser +from .namespaces import XML_NS +from .namespaces import XMLNS_NS + + +class ElementProgram(object): + DEFAULT_NAMESPACES = { + 'xmlns': XMLNS_NS, + 'xml': XML_NS, + } + + tokenizers = { + 'xml': iter_xml, + 'text': iter_text, + } + + def __init__(self, source, mode="xml", filename=None): + tokenizer = self.tokenizers[mode] + tokens = tokenizer(source, filename) + parser = ElementParser(tokens, self.DEFAULT_NAMESPACES) + + self.body = [] + + for kind, args in parser: + node = self.visit(kind, args) + if node is not None: + self.body.append(node) + + def visit(self, kind, args): + visitor = getattr(self, "visit_%s" % kind) + return visitor(*args) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/py25.py b/lib3/Chameleon-2.9.2/src/chameleon/py25.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/py25.py @@ -0,0 +1,36 @@ +import sys + +def lookup_attr(obj, key): + try: + return getattr(obj, key) + except AttributeError: + exc = sys.exc_info()[1] + try: + get = obj.__getitem__ + except AttributeError: + raise exc + try: + return get(key) + except KeyError: + raise exc + +def exec_(code, globs=None, locs=None): + """Execute code in a namespace.""" + if globs is None: + frame = sys._getframe(1) + globs = frame.f_globals + if locs is None: + locs = frame.f_locals + del frame + elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + +exec_("""def raise_with_traceback(exc, tb): + raise type(exc), exc, tb +""") + + +def next(iter): + return iter.next() diff --git a/lib3/Chameleon-2.9.2/src/chameleon/py26.py b/lib3/Chameleon-2.9.2/src/chameleon/py26.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/py26.py @@ -0,0 +1,15 @@ +import sys + +def lookup_attr(obj, key): + try: + return getattr(obj, key) + except AttributeError: + exc = sys.exc_info()[1] + try: + get = obj.__getitem__ + except AttributeError: + raise exc + try: + return get(key) + except KeyError: + raise exc diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tal.py b/lib3/Chameleon-2.9.2/src/chameleon/tal.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tal.py @@ -0,0 +1,479 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +import re +import copy + +from .exc import LanguageError +from .utils import descriptorint +from .utils import descriptorstr +from .namespaces import XMLNS_NS +from .parser import groups + + +try: + next +except NameError: + from chameleon.py25 import next + +try: + # optional library: `zope.interface` + import interfaces + import zope.interface +except ImportError: + interfaces = None + + +NAME = r"[a-zA-Z_][-a-zA-Z0-9_]*" +DEFINE_RE = re.compile(r"(?s)\s*(?:(global|local)\s+)?" + + r"(%s|\(%s(?:,\s*%s)*\))\s+(.*)\Z" % (NAME, NAME, NAME), + re.UNICODE) +SUBST_RE = re.compile(r"\s*(?:(text|structure)\s+)?(.*)\Z", re.S | re.UNICODE) +ATTR_RE = re.compile(r"\s*([^\s]+)\s+([^\s].*)\Z", re.S | re.UNICODE) + +ENTITY_RE = re.compile(r'(&(#?)(x?)(\d{1,5}|\w{1,8});)') + +WHITELIST = frozenset([ + "define", + "comment", + "condition", + "content", + "replace", + "repeat", + "attributes", + "on-error", + "omit-tag", + "script", + "switch", + "case", + "xmlns", + "xml" + ]) + + +def split_parts(arg): + # Break in pieces at undoubled semicolons and + # change double semicolons to singles: + i = 0 + while i < len(arg): + m = ENTITY_RE.search(arg[i:]) + if m is None: + break + arg = arg[:i + m.end()] + ';' + arg[i + m.end():] + i += m.end() + + arg = arg.replace(";;", "\0") + parts = arg.split(';') + parts = [p.replace("\0", ";") for p in parts] + if len(parts) > 1 and not parts[-1].strip(): + del parts[-1] # It ended in a semicolon + + return parts + + +def parse_attributes(clause): + attrs = {} + for part in split_parts(clause): + m = ATTR_RE.match(part) + if not m: + raise LanguageError( + "Bad syntax in attributes.", clause) + name, expr = groups(m, part) + if name in attrs: + raise LanguageError( + "Duplicate attribute name in attributes.", part) + + attrs[name] = expr + + return attrs + + +def parse_substitution(clause): + m = SUBST_RE.match(clause) + if m is None: + raise LanguageError( + "Invalid content substitution syntax.", clause) + + key, expression = groups(m, clause) + if not key: + key = "text" + + return key, expression + + +def parse_defines(clause): + """ + Parses a tal:define value. + + # Basic syntax, implicit local + >>> parse_defines('hello lovely') + [('local', ('hello',), 'lovely')] + + # Explicit local + >>> parse_defines('local hello lovely') + [('local', ('hello',), 'lovely')] + + # With global + >>> parse_defines('global hello lovely') + [('global', ('hello',), 'lovely')] + + # Multiple expressions + >>> parse_defines('hello lovely; tea time') + [('local', ('hello',), 'lovely'), ('local', ('tea',), 'time')] + + # With multiple names + >>> parse_defines('(hello, howdy) lovely') + [('local', ['hello', 'howdy'], 'lovely')] + + # With unicode whitespace + >>> try: + ... s = '\xc2\xa0hello lovely'.decode('utf-8') + ... except AttributeError: + ... s = '\xa0hello lovely' + >>> from chameleon.utils import unicode_string + >>> parse_defines(s) == [ + ... ('local', ('hello',), 'lovely') + ... 
] + True + + """ + defines = [] + for part in split_parts(clause): + m = DEFINE_RE.match(part) + if m is None: + raise LanguageError("Invalid define syntax", part) + context, name, expr = groups(m, part) + context = context or "local" + + if name.startswith('('): + names = [n.strip() for n in name.strip('()').split(',')] + else: + names = (name,) + + defines.append((context, names, expr)) + + return defines + + +def prepare_attributes(attrs, dyn_attributes, i18n_attributes, + ns_attributes, drop_ns): + drop = set([attribute['name'] for attribute, (ns, value) + in zip(attrs, ns_attributes) + if ns in drop_ns or ( + ns == XMLNS_NS and + attribute['value'] in drop_ns + ) + ]) + + attributes = [] + normalized = {} + + for attribute in attrs: + name = attribute['name'] + + if name in drop: + continue + + attributes.append(( + name, + attribute['value'], + attribute['quote'], + attribute['space'], + attribute['eq'], + None, + )) + + normalized[name.lower()] = len(attributes) - 1 + + for name, expr in dyn_attributes.items(): + index = normalized.get(name.lower()) + if index is not None: + _, text, quote, space, eq, _ = attributes[index] + add = attributes.__setitem__ + else: + text = None + quote = '"' + space = " " + eq = "=" + index = len(attributes) + add = attributes.insert + normalized[name.lower()] = len(attributes) - 1 + + attribute = name, text, quote, space, eq, expr + add(index, attribute) + + for name in i18n_attributes: + attr = name.lower() + if attr not in normalized: + attributes.append((name, name, '"', " ", "=", None)) + normalized[attr] = len(attributes) - 1 + + return attributes + + +class RepeatItem(object): + __slots__ = "length", "_iterator" + + __allow_access_to_unprotected_subobjects__ = True + + def __init__(self, iterator, length): + self.length = length + self._iterator = iterator + + def __iter__(self): + return self._iterator + + try: + iter(()).__len__ + except AttributeError: + @descriptorint + def index(self): + try: + remaining = self._iterator.__length_hint__() + except AttributeError: + remaining = len(tuple(copy.copy(self._iterator))) + return self.length - remaining - 1 + else: + @descriptorint + def index(self): + remaining = self._iterator.__len__() + return self.length - remaining - 1 + + @descriptorint + def start(self): + return self.index == 0 + + @descriptorint + def end(self): + return self.index == self.length - 1 + + @descriptorint + def number(self): + return self.index + 1 + + @descriptorstr + def odd(self): + """Returns a true value if the item index is odd. + + >>> it = RepeatItem(iter(("apple", "pear")), 2) + + >>> next(it._iterator) + 'apple' + >>> it.odd() + '' + + >>> next(it._iterator) + 'pear' + >>> it.odd() + 'odd' + """ + + return self.index % 2 == 1 and 'odd' or '' + + @descriptorstr + def even(self): + """Returns a true value if the item index is even. 
+ + >>> it = RepeatItem(iter(("apple", "pear")), 2) + + >>> next(it._iterator) + 'apple' + >>> it.even() + 'even' + + >>> next(it._iterator) + 'pear' + >>> it.even() + '' + """ + + return self.index % 2 == 0 and 'even' or '' + + def next(self): + raise NotImplementedError( + "Method not implemented (can't update local variable).") + + def _letter(self, base=ord('a'), radix=26): + """Get the iterator position as a lower-case letter + + >>> it = RepeatItem(iter(("apple", "pear", "orange")), 3) + >>> next(it._iterator) + 'apple' + >>> it.letter() + 'a' + >>> next(it._iterator) + 'pear' + >>> it.letter() + 'b' + >>> next(it._iterator) + 'orange' + >>> it.letter() + 'c' + """ + + index = self.index + if index < 0: + raise TypeError("No iteration position") + s = "" + while 1: + index, off = divmod(index, radix) + s = chr(base + off) + s + if not index: + return s + + letter = descriptorstr(_letter) + + @descriptorstr + def Letter(self): + """Get the iterator position as an upper-case letter + + >>> it = RepeatItem(iter(("apple", "pear", "orange")), 3) + >>> next(it._iterator) + 'apple' + >>> it.Letter() + 'A' + >>> next(it._iterator) + 'pear' + >>> it.Letter() + 'B' + >>> next(it._iterator) + 'orange' + >>> it.Letter() + 'C' + """ + + return self._letter(base=ord('A')) + + @descriptorstr + def Roman(self, rnvalues=( + (1000, 'M'), (900, 'CM'), (500, 'D'), (400, 'CD'), + (100, 'C'), (90, 'XC'), (50, 'L'), (40, 'XL'), + (10, 'X'), (9, 'IX'), (5, 'V'), (4, 'IV'), (1, 'I'))): + """Get the iterator position as an upper-case roman numeral + + >>> it = RepeatItem(iter(("apple", "pear", "orange")), 3) + >>> next(it._iterator) + 'apple' + >>> it.Roman() + 'I' + >>> next(it._iterator) + 'pear' + >>> it.Roman() + 'II' + >>> next(it._iterator) + 'orange' + >>> it.Roman() + 'III' + """ + + n = self.index + 1 + s = "" + for v, r in rnvalues: + rct, n = divmod(n, v) + s = s + r * rct + return s + + @descriptorstr + def roman(self): + """Get the iterator position as a lower-case roman numeral + + >>> it = RepeatItem(iter(("apple", "pear", "orange")), 3) + >>> next(it._iterator) + 'apple' + >>> it.roman() + 'i' + >>> next(it._iterator) + 'pear' + >>> it.roman() + 'ii' + >>> next(it._iterator) + 'orange' + >>> it.roman() + 'iii' + """ + + return self.Roman().lower() + + +if interfaces is not None: + zope.interface.classImplements(RepeatItem, interfaces.ITALESIterator) + + +class RepeatDict(dict): + """Repeat dictionary implementation. + + >>> repeat = RepeatDict({}) + >>> iterator, length = repeat('numbers', range(5)) + >>> length + 5 + + >>> repeat['numbers'] + <chameleon.tal.RepeatItem object at ...> + + """ + + __slots__ = "__setitem__", "__getitem__", "__getattr__" + + def __init__(self, d): + self.__setitem__ = d.__setitem__ + self.__getitem__ = d.__getitem__ + self.__getattr__ = d.__getitem__ + + def __call__(self, key, iterable): + """We coerce the iterable to a tuple and return an iterator + after registering it in the repeat dictionary.""" + + try: + iterable = tuple(iterable) + except TypeError: + if iterable is None: + iterable = () + else: + # The message below to the TypeError is the Python + # 2.5-style exception message. Python 2.4.X also + # raises a TypeError, but with a different message. + # ("TypeError: iteration over non-sequence"). The + # Python 2.5 error message is more helpful. We + # construct the 2.5-style message explicitly here so + # that both Python 2.4.X and Python 2.5+ will raise + # the same error. 
This makes writing the tests easier + and makes the output easier to understand. + raise TypeError("%r object is not iterable" % + type(iterable).__name__) + + length = len(iterable) + iterator = iter(iterable) + + # Insert as repeat item + self[key] = RepeatItem(iterator, length) + + return iterator, length + + +class ErrorInfo(object): + """Information about an exception passed to an on-error handler.""" + + def __init__(self, err, position=(None, None)): + if isinstance(err, Exception): + self.type = err.__class__ + self.value = err + else: + self.type = err + self.value = None + self.lineno = position[0] + self.offset = position[1] + + +if interfaces is not None: + zope.interface.classImplements(ErrorInfo, interfaces.ITALExpressionErrorInfo) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tales.py b/lib3/Chameleon-2.9.2/src/chameleon/tales.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tales.py @@ -0,0 +1,541 @@ +import re +import sys + +from .astutil import parse +from .astutil import store +from .astutil import load +from .astutil import ItemLookupOnAttributeErrorVisitor +from .codegen import TemplateCodeGenerator +from .codegen import template +from .codegen import reverse_builtin_map +from .astutil import Builtin +from .astutil import Symbol +from .exc import ExpressionError +from .utils import resolve_dotted +from .utils import Markup +from .utils import ast +from .tokenize import Token +from .parser import substitute +from .compiler import Interpolator + +try: + from .py26 import lookup_attr +except SyntaxError: + from .py25 import lookup_attr + + +split_parts = re.compile(r'(?<!\\)\|') +match_prefix = re.compile(r'^\s*([a-z\-_]+):').match +re_continuation = re.compile(r'\\\s*$', re.MULTILINE) + +try: + from __builtin__ import basestring +except ImportError: + basestring = str + + +def resolve_global(value): + name = reverse_builtin_map.get(value) + if name is not None: + return Builtin(name) + + return Symbol(value) + + +def test(expression, engine=None, **env): + if engine is None: + engine = SimpleEngine() + + body = expression(store("result"), engine) + module = ast.Module(body) + module = ast.fix_missing_locations(module) + env['rcontext'] = {} + source = TemplateCodeGenerator(module).code + code = compile(source, '<string>', 'exec') + exec(code, env) + result = env["result"] + + if isinstance(result, basestring): + result = str(result) + + return result + + +def transform_attribute(node): + return template( + "lookup(object, name)", + lookup=Symbol(lookup_attr), + object=node.value, + name=ast.Str(s=node.attr), + mode="eval" + ) + + +class TalesExpr(object): + """Base class. + + This class helps implementations for the Template Attribute + Language Expression Syntax (TALES). + + The syntax evaluates one or more expressions, separated by '|' + (pipe). The first expression that succeeds, is returned. + + Expression: + + expression := (type ':')? line ('|' expression)? + line := .* + + Expression lines may not contain the pipe character unless + escaped. It has a special meaning: + + If the expression to the left of the pipe fails (raises one of the + exceptions listed in ``catch_exceptions``), evaluation proceeds to + the expression(s) on the right. + + Subclasses must implement ``translate`` which assigns a value for + a given expression. + + >>> class PythonPipeExpr(TalesExpr): + ... def translate(self, expression, target): + ... compiler = PythonExpr(expression) + ... 
return compiler(target, None) + + >>> test(PythonPipeExpr('foo | bar | 42')) + 42 + + >>> test(PythonPipeExpr('foo|42')) + 42 + """ + + exceptions = NameError, \ + ValueError, \ + AttributeError, \ + LookupError, \ + TypeError + + def __init__(self, expression): + self.expression = expression + + def __call__(self, target, engine): + remaining = self.expression + assignments = [] + + while remaining: + if match_prefix(remaining) is not None: + compiler = engine.parse(remaining) + assignment = compiler.assign_value(target) + remaining = "" + else: + for m in split_parts.finditer(remaining): + expression = remaining[:m.start()] + remaining = remaining[m.end():] + break + else: + expression = remaining + remaining = "" + + expression = expression.replace('\\|', '|') + assignment = self.translate(expression, target) + assignments.append(assignment) + + if not assignments: + assignments.append( + self.translate(remaining, target) + ) + + for i, assignment in enumerate(reversed(assignments)): + if i == 0: + body = assignment + else: + body = [ast.TryExcept( + body=assignment, + handlers=[ast.ExceptHandler( + type=ast.Tuple( + elts=map(resolve_global, self.exceptions), + ctx=ast.Load()), + name=None, + body=body, + )], + )] + + return body + + def translate(self, expression, target): + """Return statements that assign a value to ``target``.""" + + raise NotImplementedError( + "Must be implemented by a subclass.") + + +class PathExpr(TalesExpr): + """Path expression compiler. + + Syntax:: + + PathExpr ::= Path [ '|' Path ]* + Path ::= variable [ '/' URL_Segment ]* + variable ::= Name + + For example:: + + request/cookies/oatmeal + nothing + here/some-file 2001_02.html.tar.gz/foo + root/to/branch | default + + When a path expression is evaluated, it attempts to traverse + each path, from left to right, until it succeeds or runs out of + paths. To traverse a path, it first fetches the object stored in + the variable. For each path segment, it traverses from the current + object to the subobject named by the path segment. + + Once a path has been successfully traversed, the resulting object + is the value of the expression. If it is a callable object, such + as a method or class, it is called. + + The semantics of traversal (and what it means to be callable) are + implementation-dependent (see the ``translate`` method). + """ + + def translate(self, expression, target): + raise NotImplementedError( + "Path expressions are not yet implemented. " + "It's unclear whether a general implementation " + "can be devised.") + + +class PythonExpr(TalesExpr): + """Python expression compiler. + + >>> test(PythonExpr('2 + 2')) + 4 + + The Python expression is a TALES expression. 
That means we can use + the pipe operator: + + >>> test(PythonExpr('foo | 2 + 2 | 5')) + 4 + + To include a pipe character, use a backslash escape sequence: + + >>> test(PythonExpr('\"\|\"')) + '|' + """ + + transform = ItemLookupOnAttributeErrorVisitor(transform_attribute) + + def parse(self, string): + return parse(string, 'eval').body + + def translate(self, expression, target): + # Strip spaces + string = expression.strip() + + # Conver line continuations to newlines + string = substitute(re_continuation, '\n', string) + + # Convert newlines to spaces + string = string.replace('\n', ' ') + + try: + value = self.parse(string) + except SyntaxError: + exc = sys.exc_info()[1] + raise ExpressionError(exc.msg, string) + + # Transform attribute lookups to allow fallback to item lookup + self.transform.visit(value) + + return [ast.Assign(targets=[target], value=value)] + + +class ImportExpr(object): + re_dotted = re.compile(r'^[A-Za-z.]+$') + + def __init__(self, expression): + self.expression = expression + + def __call__(self, target, engine): + string = self.expression.strip().replace('\n', ' ') + value = template( + "RESOLVE(NAME)", + RESOLVE=Symbol(resolve_dotted), + NAME=ast.Str(s=string), + mode="eval", + ) + return [ast.Assign(targets=[target], value=value)] + + +class NotExpr(object): + """Negates the expression. + + >>> engine = SimpleEngine(PythonExpr) + + >>> test(NotExpr('False'), engine) + True + >>> test(NotExpr('True'), engine) + False + """ + + def __init__(self, expression): + self.expression = expression + + def __call__(self, target, engine): + compiler = engine.parse(self.expression) + body = compiler.assign_value(target) + return body + template("target = not target", target=target) + + +class StructureExpr(object): + """Wraps the expression result as 'structure'. + + >>> engine = SimpleEngine(PythonExpr) + + >>> test(StructureExpr('\"<tt>foo</tt>\"'), engine) + '<tt>foo</tt>' + """ + + wrapper_class = Symbol(Markup) + + def __init__(self, expression): + self.expression = expression + + def __call__(self, target, engine): + compiler = engine.parse(self.expression) + body = compiler.assign_value(target) + return body + template( + "target = wrapper(target)", + target=target, + wrapper=self.wrapper_class + ) + + +class IdentityExpr(object): + """Identity expression. + + Exists to demonstrate the interface. + + >>> test(IdentityExpr('42')) + 42 + """ + + def __init__(self, expression): + self.expression = expression + + def __call__(self, target, engine): + compiler = engine.parse(self.expression) + return compiler.assign_value(target) + + +class StringExpr(object): + """Similar to the built-in ``string.Template``, but uses an + expression engine to support pluggable string substitution + expressions. + + Expr string: + + string := (text | substitution) (string)? + substitution := ('$' variable | '${' expression '}') + text := .* + + In other words, an expression string can contain multiple + substitutions. The text- and substitution parts will be + concatenated back into a string. + + >>> test(StringExpr('Hello ${name}!'), name='world') + 'Hello world!' + + In the default configuration, braces may be omitted if the + expression is an identifier. + + >>> test(StringExpr('Hello $name!'), name='world') + 'Hello world!' + + The ``braces_required`` flag changes this setting: + + >>> test(StringExpr('Hello $name!', True)) + 'Hello $name!' 
+ + We can escape interpolation using the standard escaping + syntax: + + >>> test(StringExpr('\\${name}')) + '\\\${name}' + + Multiple interpolations in one: + + >>> test(StringExpr(\"Hello ${'a'}${'b'}${'c'}!\")) + 'Hello abc!' + + Here's a more involved example taken from a JavaScript source: + + >>> result = test(StringExpr(\"\"\" + ... function(oid) { + ... $('#' + oid).autocomplete({source: ${'source'}}); + ... } + ... \"\"\")) + + >>> 'source: source' in result + True + + In the above examples, the expression is evaluated using the + dummy engine which just returns the input as a string. + + As an example, we'll implement an expression engine which + instead counts the number of characters in the expression and + returns an integer result. + + >>> class engine: + ... @staticmethod + ... def parse(expression): + ... class compiler: + ... @staticmethod + ... def assign_text(target): + ... return [ + ... ast.Assign( + ... targets=[target], + ... value=ast.Num(n=len(expression)) + ... )] + ... + ... return compiler + + This will demonstrate how the string expression coerces the + input to a string. + + >>> expr = StringExpr( + ... 'There are ${hello world} characters in \"hello world\"') + + We evaluate the expression using the new engine: + + >>> test(expr, engine) + 'There are 11 characters in \"hello world\"' + """ + + def __init__(self, expression, braces_required=False): + # The code relies on the expression being a token string + if not isinstance(expression, Token): + expression = Token(expression, 0) + + self.translator = Interpolator(expression, braces_required) + + def __call__(self, name, engine): + return self.translator(name, engine) + + +class ProxyExpr(StringExpr): + def __init__(self, name, *args): + super(ProxyExpr, self).__init__(*args) + self.name = name + + def __call__(self, target, engine): + assignment = super(ProxyExpr, self).__call__(target, engine) + return assignment + [ + ast.Assign(targets=[target], value=ast.Call( + func=load(self.name), + args=[target], + keywords=[], + starargs=None, + kwargs=None + )) + ] + + +class ExistsExpr(object): + """Boolean wrapper. + + Return 0 if the expression results in an exception, otherwise 1. + + As a means to generate exceptions, we set up an expression engine + which evaluates the provided expression using Python: + + >>> engine = SimpleEngine(PythonExpr) + + >>> test(ExistsExpr('int(0)'), engine) + 1 + >>> test(ExistsExpr('int(None)'), engine) + 0 + + """ + + exceptions = AttributeError, LookupError, TypeError, NameError, KeyError + + def __init__(self, expression): + self.expression = expression + + def __call__(self, target, engine): + ignore = store("_ignore") + compiler = engine.parse(self.expression) + body = compiler.assign_value(ignore) + + classes = map(resolve_global, self.exceptions) + + return [ + ast.TryExcept( + body=body, + handlers=[ast.ExceptHandler( + type=ast.Tuple(elts=classes, ctx=ast.Load()), + name=None, + body=template("target = 0", target=target), + )], + orelse=template("target = 1", target=target) + ) + ] + + +class ExpressionParser(object): + def __init__(self, factories, default): + self.factories = factories + self.default = default + + def __call__(self, expression): + m = match_prefix(expression) + if m is not None: + prefix = m.group(1) + expression = expression[m.end():] + else: + prefix = self.default + + try: + factory = self.factories[prefix] + except KeyError: + exc = sys.exc_info()[1] + raise LookupError( + "Unknown expression type: %s."
% str(exc) + ) + + return factory(expression) + + +class SimpleEngine(object): + expression = PythonExpr + + def __init__(self, expression=None): + if expression is not None: + self.expression = expression + + def parse(self, string): + compiler = self.expression(string) + return SimpleCompiler(compiler, self) + + +class SimpleCompiler(object): + def __init__(self, compiler, engine): + self.compiler = compiler + self.engine = engine + + def assign_text(self, target): + """Assign expression string as a text value.""" + + return self._assign_value_and_coerce(target, "str") + + def assign_value(self, target): + """Assign expression string as object value.""" + + return self.compiler(target, self.engine) + + def _assign_value_and_coerce(self, target, builtin): + return self.assign_value(target) + template( + "target = builtin(target)", + target=target, + builtin=builtin + ) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/template.py b/lib3/Chameleon-2.9.2/src/chameleon/template.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/template.py @@ -0,0 +1,332 @@ +from __future__ import with_statement + +import os +import sys +import copy +import hashlib +import shutil +import logging +import tempfile +import inspect + +pkg_digest = hashlib.sha1(__name__.encode('utf-8')) + +try: + import pkg_resources +except ImportError: + logging.info("Setuptools not installed. Unable to determine version.") +else: + for path in sys.path: + for distribution in pkg_resources.find_distributions(path): + if distribution.has_version(): + version = distribution.version.encode('utf-8') + pkg_digest.update(version) + + +from .exc import TemplateError +from .exc import ExceptionFormatter +from .compiler import Compiler +from .config import DEBUG_MODE +from .config import AUTO_RELOAD +from .config import EAGER_PARSING +from .config import CACHE_DIRECTORY +from .loader import ModuleLoader +from .loader import MemoryLoader +from .nodes import Module +from .utils import DebuggingOutputStream +from .utils import Scope +from .utils import join +from .utils import mangle +from .utils import create_formatted_exception +from .utils import read_bytes +from .utils import raise_with_traceback +from .utils import byte_string + + +log = logging.getLogger('chameleon.template') + + +def _make_module_loader(): + remove = False + if CACHE_DIRECTORY: + path = CACHE_DIRECTORY + else: + path = tempfile.mkdtemp() + remove = True + + return ModuleLoader(path) + + +class BaseTemplate(object): + """Template base class. + + Takes a string input which must be one of the following: + + - a unicode string (or string on Python 3); + - a utf-8 encoded byte string; + - a byte string for an XML document that defines an encoding + in the document preamble; + - an HTML document that specifies the encoding via the META tag. + + Note that the template input is decoded, parsed and compiled on + initialization. + """ + + default_encoding = "utf-8" + + # This attribute is strictly informational in this template class + # and is used in exception formatting. It may be set on + # initialization using the optional ``filename`` keyword argument.
+ filename = '<string>' + + _cooked = False + + if DEBUG_MODE or CACHE_DIRECTORY: + loader = _make_module_loader() + else: + loader = MemoryLoader() + + if DEBUG_MODE: + output_stream_factory = DebuggingOutputStream + else: + output_stream_factory = list + + debug = DEBUG_MODE + + # The ``builtins`` dictionary can be used by a template class to + # add symbols which may not be redefined and which are (cheaply) + # available in the template variable scope + builtins = {} + + # The ``builtins`` dictionary is updated with this dictionary at + # cook time. Note that it can be provided at class initialization + # using the ``extra_builtins`` keyword argument. + extra_builtins = {} + + # Expression engine must be provided by subclass + engine = None + + # When ``strict`` is set, expressions must be valid at compile + # time. When not set, this is only required at evaluation time. + strict = True + + def __init__(self, body=None, **config): + self.__dict__.update(config) + + if body is not None: + self.write(body) + + # This is only necessary if the ``debug`` flag was passed as a + # keyword argument + if self.__dict__.get('debug') is True: + self.loader = _make_module_loader() + + def __call__(self, **kwargs): + return self.render(**kwargs) + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, self.filename) + + @property + def keep_body(self): + # By default, we only save the template body if we're + # in debugging mode (to save memory). + return self.__dict__.get('keep_body', DEBUG_MODE) + + @property + def keep_source(self): + # By default, we only save the generated source code if we're + # in debugging mode (to save memory). + return self.__dict__.get('keep_source', DEBUG_MODE) + + def cook(self, body): + digest = self._digest(body) + builtins_dict = self.builtins.copy() + builtins_dict.update(self.extra_builtins) + names, builtins = zip(*builtins_dict.items()) + program = self._cook(body, digest, names) + + initialize = program['initialize'] + functions = initialize(*builtins) + + for name, function in functions.items(): + setattr(self, "_" + name, function) + + self._cooked = True + + if self.keep_body: + self.body = body + + def cook_check(self): + assert self._cooked + + def parse(self, body): + raise NotImplementedError("Must be implemented by subclass.") + + def render(self, **__kw): + econtext = Scope(__kw) + rcontext = {} + self.cook_check() + stream = self.output_stream_factory() + try: + self._render(stream, econtext, rcontext) + except: + cls, exc, tb = sys.exc_info() + errors = rcontext.get('__error__') + if errors: + formatter = exc.__str__ + if isinstance(formatter, ExceptionFormatter): + if errors is not formatter._errors: + formatter._errors.extend(errors) + raise + + formatter = ExceptionFormatter(errors, econtext, rcontext) + + try: + exc = create_formatted_exception(exc, cls, formatter) + except TypeError: + pass + + raise_with_traceback(exc, tb) + + raise + + return join(stream) + + def write(self, body): + if isinstance(body, byte_string): + body, encoding, content_type = read_bytes( + body, self.default_encoding + ) + else: + content_type = body.startswith('<?xml') + encoding = None + + self.content_type = content_type + self.content_encoding = encoding + + self.cook(body) + + def _get_module_name(self, digest): + return "%s.py" % digest + + def _cook(self, body, digest, builtins): + name = self._get_module_name(digest) + cooked = self.loader.get(name) + if cooked is None: + try: + source = self._make(body, builtins) + if self.debug: + source = "# 
template: %s\n#\n%s" % (self.filename, source) + if self.keep_source: + self.source = source + cooked = self.loader.build(source, name) + except TemplateError: + exc = sys.exc_info()[1] + exc.filename = self.filename + raise + elif self.keep_source: + module = sys.modules.get(cooked.get('__name__')) + if module is not None: + self.source = inspect.getsource(module) + else: + self.source = None + + return cooked + + def _digest(self, body): + class_name = type(self).__name__.encode('utf-8') + sha = pkg_digest.copy() + sha.update(body.encode('utf-8', 'ignore')) + sha.update(class_name) + return sha.hexdigest() + + def _compile(self, program, builtins): + compiler = Compiler(self.engine, program, builtins, strict=self.strict) + return compiler.code + + def _make(self, body, builtins): + program = self.parse(body) + module = Module("initialize", program) + return self._compile(module, builtins) + + +class BaseTemplateFile(BaseTemplate): + """File-based template base class. + + Relative path names are supported only when a template loader is + provided as the ``loader`` parameter. + """ + + # Auto reload is not enabled by default because it's a significant + # performance hit + auto_reload = AUTO_RELOAD + + def __init__(self, filename, auto_reload=None, **config): + # Normalize filename + filename = os.path.abspath( + os.path.normpath(os.path.expanduser(filename)) + ) + + self.filename = filename + + # Override reload setting only if value is provided explicitly + if auto_reload is not None: + self.auto_reload = auto_reload + + super(BaseTemplateFile, self).__init__(**config) + + if EAGER_PARSING: + self.cook_check() + + def cook_check(self): + if self.auto_reload: + mtime = self.mtime() + + if mtime != self._v_last_read: + self._v_last_read = mtime + self._cooked = False + + if self._cooked is False: + body = self.read() + log.debug("cooking %r (%d bytes)..." 
% (self.filename, len(body))) + self.cook(body) + + def mtime(self): + try: + return os.path.getmtime(self.filename) + except (IOError, OSError): + return 0 + + def read(self): + with open(self.filename, "rb") as f: + data = f.read() + + body, encoding, content_type = read_bytes( + data, self.default_encoding + ) + + # In non-XML mode, we support various platform-specific line + # endings and convert them to the UNIX newline character + if content_type != "text/xml" and '\r' in body: + body = body.replace('\r\n', '\n').replace('\r', '\n') + + self.content_type = content_type + self.content_encoding = encoding + + return body + + def _get_module_name(self, digest): + filename = os.path.basename(self.filename) + mangled = mangle(filename) + return "%s_%s.py" % (mangled, digest) + + def _get_filename(self): + return self.__dict__.get('filename') + + def _set_filename(self, filename): + self.__dict__['filename'] = filename + self._v_last_read = None + self._cooked = False + + filename = property(_get_filename, _set_filename) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/__init__.py b/lib3/Chameleon-2.9.2/src/chameleon/tests/__init__.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/__init__.py @@ -0,0 +1,1 @@ +# diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-interpolation.txt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-interpolation.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-interpolation.txt @@ -0,0 +1,1 @@ +${'<Hello world>'}<&> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-variable-scope.html b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-variable-scope.html new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-variable-scope.html @@ -0,0 +1,7 @@ +<html> + <body py:with="text 'Hello world!'"> + ${text} + $text + </body> + ${text | 'Goodbye world!'} +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-variable-scope.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-variable-scope.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001-variable-scope.pt @@ -0,0 +1,11 @@ +<html> + <body tal:define="text 'Hello world!'"> + ${text} + </body> + <tal:check condition="exists: text"> + bad + </tal:check> + <tal:check condition="not: exists: text"> + ok + </tal:check> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/001.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/002-repeat-scope.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/002-repeat-scope.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/002-repeat-scope.pt @@ -0,0 +1,8 @@ +<html> + <body> + <div tal:repeat="text ('Hello', 'Goodbye')"> + <span tal:repeat="char ('!', '.')">${text}${char}</span> + </div> + <tal:check condition="not: exists: text">ok</tal:check> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/002.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/002.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/002.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc ></doc> diff 
--git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/003-content.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/003-content.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/003-content.pt @@ -0,0 +1,17 @@ +<html> + <body> + <div tal:content="'Hello world!'" /> + <div tal:content="'Hello world!'" />1 + 2<div tal:content="'Hello world!'" /> + <div tal:content="'Hello world!'" />3 + <div tal:content="'Hello world!'">4</div>5 + 6<div tal:content="'Hello world!'"></div> + <div tal:content="1" /> + <div tal:content="1.0" /> + <div tal:content="True" /> + <div tal:content="False" /> + <div tal:content="0" /> + <div tal:content="None" /> + <div tal:replace="content" /> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/003.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/003.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/003.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc></doc > diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/004-attributes.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/004-attributes.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/004-attributes.pt @@ -0,0 +1,18 @@ +<html> + <body> + <span tal:attributes="class 'hello'" /> + <span class="goodbye" tal:attributes="class 'hello'" /> + <span CLASS="goodbye" tal:attributes="class 'hello'" /> + <span tal:attributes="class None" /> + <span a="1" b="2" c="3" tal:attributes="a None" /> + <span a="1" b="2" c="3" tal:attributes="b None" /> + <span a="1" b="2" c="3" tal:attributes="c None" /> + <span a="1" b="2" c="3" tal:attributes="b None; c None" /> + <span a="1" b="2" c="3" tal:attributes="b string:;;" /> + <span a="1" b="2" c="3" tal:attributes="b string:&" /> + <span class="hello" tal:attributes="class 'goodbye'" /> + <span class="hello" tal:attributes="class '"goodbye"'" /> + <span class="hello" tal:attributes="class '\'goodbye\''" /> + <span class='hello' tal:attributes="class '\'goodbye\''" /> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/004.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/004.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/004.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc a1 CDATA #IMPLIED> +]> +<doc a1="v1"></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/005-default.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/005-default.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/005-default.pt @@ -0,0 +1,12 @@ +<html> + <body> + <img class="default" tal:attributes="class default" /> + <img tal:attributes="class default" /> + <span tal:content="default">Default</span> + <span tal:content="True">Default</span> + <span tal:content="False">Default</span> + <span tal:content="default"> + <em>${'Computed default'}</em> + </span> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/005.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/005.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/005.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc a1 CDATA #IMPLIED> +]> +<doc a1 = "v1"></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/006-attribute-interpolation.pt 
b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/006-attribute-interpolation.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/006-attribute-interpolation.pt @@ -0,0 +1,9 @@ +<html> + <body class="ltr" tal:define="hash string:#"> + <img src="${'#'}" alt="copyright (c) ${2010}" /> + <img src="" alt="copyright (c) ${2010}" tal:attributes="src string:$hash" /> + <img src="" alt="copyright (c) ${2010}" tal:attributes="src string:${hash}" /> + <img src="${None}" alt="$ignored" /> + <img src="" alt="${'%stype \'str\'%s' % (chr(60), chr(62))}" /> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/006.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/006.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/006.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc a1 CDATA #IMPLIED> +]> +<doc a1='v1'></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/007-content-interpolation.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/007-content-interpolation.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/007-content-interpolation.pt @@ -0,0 +1,15 @@ +<html> + <body> + ${'Hello world!'} + ${literal} + ${structure: literal.s} + ${"%stype 'str'%s" % (chr(60), chr(62))} + && + ${None} + ${None or + 'Hello world'} + $leftalone + <div>${None}</div> + <div>${1 < 2 and 'Hello world' or None}</div> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/007.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/007.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/007.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc> </doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/008-builtins.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/008-builtins.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/008-builtins.pt @@ -0,0 +1,11 @@ +<html> + <body> + ${nothing} + <div tal:attributes="class string:dynamic" class="static"> + ${attrs['class']} + </div> + <div tal:define="nothing string:nothing"> + ${nothing} + </div> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/008.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/008.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/008.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc>&<>"'</doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/009-literals.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/009-literals.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/009-literals.pt @@ -0,0 +1,5 @@ +<html> + <body> + ${literal} + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/009.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/009.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/009.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc> </doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/010-structure.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/010-structure.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/010-structure.pt @@ -0,0 +1,9 @@ +<html> + <body> + <div tal:content="text string:1 < 2" /> + <div 
tal:content="structure string:2 < 3, 2&3, 2<3, 2>3" /> + <div tal:content="structure string:3 ${'<'} 4" /> + <div tal:content="structure '%d < %d' % (4, 5)" /> + <div tal:replace="structure content" /> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/010.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/010.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/010.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc a1 CDATA #IMPLIED> +]> +<doc a1="v1" ></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/011-messages.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/011-messages.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/011-messages.pt @@ -0,0 +1,9 @@ +<html> + <body> + <div tal:content="text message" /> + <div tal:content="structure message" /> + <div tal:content="text string:${message}" /> + <div tal:content="structure string:${message}" /> + ${message} + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/011.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/011.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/011.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc a1 CDATA #IMPLIED a2 CDATA #IMPLIED> +]> +<doc a1="v1" a2="v2"></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/012-translation.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/012-translation.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/012-translation.pt @@ -0,0 +1,21 @@ +<html> + <body> + <div i18n:translate=""> + Hello world! + </div> + <div i18n:translate="hello_world"> + Hello world! + </div> + <div i18n:translate=""> + <sup>Hello world!</sup> + </div> + <div i18n:translate=""> + Hello <em i18n:name="first">${'world'}</em>! + Goodbye <em i18n:name="second">${'planet'}</em>! + </div> + <div i18n:translate="hello_goodbye"> + Hello <em i18n:name="first">${'world'}</em>! + Goodbye <em i18n:name="second">${'planet'}</em>! 
+ </div> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/012.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/012.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/012.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc : CDATA #IMPLIED> +]> +<doc :="v1"></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/013-repeat-nested.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/013-repeat-nested.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/013-repeat-nested.pt @@ -0,0 +1,11 @@ +<html> + <body> + <table> + <tr tal:repeat="i (1,2)"> + <td tal:repeat="j (1,2)"> + [${i},${j}] + </td> + </tr> + </table> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/013.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/013.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/013.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc _.-0123456789 CDATA #IMPLIED> +]> +<doc _.-0123456789="v1"></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/014-repeat-nested-similar.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/014-repeat-nested-similar.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/014-repeat-nested-similar.pt @@ -0,0 +1,7 @@ +<html> + <body> + <span tal:repeat="i (3,4)"> + <span tal:repeat="j (3,4)">[${i},${j}]</span> + </span> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/014.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/014.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/014.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc abcdefghijklmnopqrstuvwxyz CDATA #IMPLIED> +]> +<doc abcdefghijklmnopqrstuvwxyz="v1"></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/015-translation-nested.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/015-translation-nested.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/015-translation-nested.pt @@ -0,0 +1,10 @@ +<html> + <body> + <div i18n:translate=""> + Price: + <span i18n:name="price" i18n:translate=""> + Per kilo <em i18n:name="amount">${12.5}</em> + </span> + </div> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/015.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/015.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/015.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc ABCDEFGHIJKLMNOPQRSTUVWXYZ CDATA #IMPLIED> +]> +<doc ABCDEFGHIJKLMNOPQRSTUVWXYZ="v1"></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/016-explicit-translation.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/016-explicit-translation.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/016-explicit-translation.pt @@ -0,0 +1,11 @@ +<html> + <body> + <div i18n:translate="" tal:content="string:Hello world!"> + Hello world! 
+ </div> + <img alt="${'Hello world!'}" i18n:attributes="alt" /> + <img alt="${'Hello world!'}" i18n:attributes="alt hello_world" /> + <img tal:attributes="alt 'Hello world!'" i18n:attributes="alt" /> + <img tal:attributes="alt 'Hello world!'" i18n:attributes="alt hello_world" /> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/016.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/016.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/016.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc><?pi?></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/017-omit-tag.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/017-omit-tag.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/017-omit-tag.pt @@ -0,0 +1,12 @@ +<html> + <body> + <div tal:omit-tag="">Hello world!</div> + <div tal:omit-tag="">1 + Hello world! + 2</div>3 + 4<div tal:omit-tag="True">Hello world!</div> + <div tal:omit-tag="False">Hello world!</div> + <div class="omitted" tal:omit-tag="True">Hello world!</div> + <div class="${'omitted'}" tal:omit-tag="True">Hello world!</div> + </body> +</html> \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/017.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/017.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/017.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc><?pi some data ? > <??></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/018-translation-nested-dynamic.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/018-translation-nested-dynamic.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/018-translation-nested-dynamic.pt @@ -0,0 +1,13 @@ +<div xmlns="http://www.w3.org/1999/xhtml" + xmlns:i18n="http://xml.zope.org/namespaces/i18n"> + <div i18n:translate="" tal:omit-tag=""> + <span i18n:name="monthname" + i18n:translate="" + tal:content="'october'" + tal:omit-tag="">monthname</span> + <span i18n:name="year" + i18n:translate="" + tal:content="1982" + tal:omit-tag="">year</span> + </div> +</div> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/018.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/018.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/018.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc><![CDATA[<foo>]]></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/019-replace.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/019-replace.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/019-replace.pt @@ -0,0 +1,13 @@ +<html> + <body> + <div tal:replace="'Hello world!'" /> + <div tal:replace="'Hello world!'" />1 + 2<div tal:replace="'Hello world!'" /> + <div tal:replace="'Hello world!'" />3 + <div tal:replace="'Hello world!'">4</div>5 + 6<div tal:replace="'Hello world!'"></div> + <div tal:replace="1" /> + <div tal:replace="1.0" /> + <div tal:replace="True" /> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/019.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/019.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/019.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc><![CDATA[<&]]></doc> diff --git 
a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/020-on-error.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/020-on-error.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/020-on-error.pt @@ -0,0 +1,10 @@ +<html> + <body> + <div id="test" tal:attributes="class python: 'abc' + 2" tal:on-error="nothing" /> + <div tal:on-error="string:${type(error.value).__name__} thrown at ${error.lineno}:${error.offset}."> + <div tal:content="undefined" /> + </div> + <div tal:replace="undefined" tal:on-error="nothing" /> + <div tal:content="undefined" tal:on-error="nothing" /> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/020.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/020.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/020.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc><![CDATA[<&]>]]]></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/021-translation-domain.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/021-translation-domain.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/021-translation-domain.pt @@ -0,0 +1,16 @@ +<html> + <body i18n:domain="old"> + <div i18n:domain="new" i18n:translate=""> + Hello world! + </div> + <div i18n:translate=""> + Hello world! + </div> + <div class="test" i18n:domain="new" i18n:attributes="class"> + Hello world! + </div> + <div class="test" i18n:domain="new" i18n:attributes="class test_msgid"> + Hello world! + </div> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/021.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/021.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/021.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc><!-- a comment --></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/022-switch.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/022-switch.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/022-switch.pt @@ -0,0 +1,13 @@ +<html> + <body> + <div tal:switch="True"> + <span tal:case="False">bad</span> + <span tal:case="True">ok</span> + <span tal:case="not not True">ok</span> + </div> + <div tal:switch="True"> + <span tal:case="False">bad</span> + <span tal:case="default">ok</span> + </div> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/022.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/022.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/022.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc><!-- a comment ->--></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/023-condition.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/023-condition.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/023-condition.pt @@ -0,0 +1,6 @@ +<html> + <body tal:condition="True"> + <span tal:define="selector False" tal:condition="selector">bad</span> + <span tal:condition="True">ok</span> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/023.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/023.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/023.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ENTITY e ""> +]> +<doc>&e;</doc> diff 
--git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/024-namespace-elements.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/024-namespace-elements.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/024-namespace-elements.pt @@ -0,0 +1,16 @@ +<html> + <body> + <tal:first> + <tal:second> + ${'first'} + </tal:second> + second + </tal:first> + <tal:block condition="True"> + ok + </tal:block> + <tal:block condition="False"> + bad + </tal:block> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/024.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/024.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/024.xml @@ -0,0 +1,6 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (foo)> +<!ELEMENT foo (#PCDATA)> +<!ENTITY e "<foo></foo>"> +]> +<doc>&e;</doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/025-repeat-whitespace.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/025-repeat-whitespace.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/025-repeat-whitespace.pt @@ -0,0 +1,14 @@ +<html> + <body> + <ul> + <tal:item repeat="i (1, 2, 3)"><li tal:content="i" /></tal:item> + <span tal:omit-tag="" tal:repeat="j (1, 2, 3)"><li tal:content="j" /></span> + <tal:count> + <tal:count-loop repeat="count (1, 2, 3)"> + <span tal:replace="count" + /><tal:comma condition="not repeat['count'].end">,</tal:comma> + </tal:count-loop> + </tal:count>. + </ul> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/025.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/025.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/025.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (foo*)> +<!ELEMENT foo (#PCDATA)> +]> +<doc><foo/><foo></foo></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/026-repeat-variable.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/026-repeat-variable.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/026-repeat-variable.pt @@ -0,0 +1,13 @@ +<div xmlns="http://www.w3.org/1999/xhtml" + xmlns:tal="http://xml.zope.org/namespaces/tal"> + <ul> + <li tal:attributes="class repeat['i'].even()+repeat['i'].odd()" name="${i}-${repeat.i.index}" tal:repeat="i range(3)"><span tal:replace="i" /></li> + </ul> + <ul> + <li tal:attributes="class repeat['i'].even+repeat['i'].odd" + tal:repeat="i range(3)"><span tal:replace="i" /></li> + </ul> + <ul> + <li tal:repeat="i range(3)"><span tal:condition="repeat['i'].even" tal:replace="repeat['i'].even" /><span tal:condition="repeat['i'].odd" tal:replace="repeat['i'].odd" /></li> + </ul> +</div> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/026.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/026.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/026.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (foo*)> +<!ELEMENT foo EMPTY> +]> +<doc><foo/><foo></foo></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/027-attribute-replacement.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/027-attribute-replacement.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/027-attribute-replacement.pt @@ -0,0 +1,11 @@ +<div xmlns="http://www.w3.org/1999/xhtml" + xmlns:tal="http://xml.zope.org/namespaces/tal"> + <span id="test" + class="dummy" + onClick="" + tal:define="a 
'abc'" + tal:attributes="class 'def' + a + default; style 'hij'; onClick 'alert();;'" + tal:content="a + 'ghi'" /> + <span tal:replace="'Hello World!'">Hello <b>Universe</b>!</span> + <span tal:replace="'Hello World!'"><b>Hello Universe!</b></span> +</div> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/027.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/027.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/027.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (foo*)> +<!ELEMENT foo ANY> +]> +<doc><foo/><foo></foo></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/028-attribute-toggle.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/028-attribute-toggle.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/028-attribute-toggle.pt @@ -0,0 +1,6 @@ +<div xmlns="http://www.w3.org/1999/xhtml" + xmlns:tal="http://xml.zope.org/namespaces/tal"> + <option tal:attributes="selected True"></option> + <option tal:attributes="selected False"></option> + <option tal:attributes="selected None"></option> +</div> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/028.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/028.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/028.xml @@ -0,0 +1,5 @@ +<?xml version="1.0"?> +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/029-attribute-ordering.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/029-attribute-ordering.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/029-attribute-ordering.pt @@ -0,0 +1,5 @@ +<div xmlns="http://www.w3.org/1999/xhtml" + xmlns:tal="http://xml.zope.org/namespaces/tal"> + <a rel="self" href="http://repoze.org" id="link-id" + tal:attributes="href 'http://python.org'" /> +</div> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/029.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/029.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/029.xml @@ -0,0 +1,5 @@ +<?xml version='1.0'?> +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/030-repeat-tuples.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/030-repeat-tuples.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/030-repeat-tuples.pt @@ -0,0 +1,7 @@ +<html> + <body> + <div tal:repeat="(i, j) ((1, 2), (3, 4))"> + ${repeat['i', 'j'].number}, ${i}, ${j} + </div> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/030.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/030.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/030.xml @@ -0,0 +1,5 @@ +<?xml version = "1.0"?> +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/031-namespace-with-tal.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/031-namespace-with-tal.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/031-namespace-with-tal.pt @@ -0,0 +1,7 @@ +<div> + <tal:example replace="'Hello World!'" /> + <tal:example tal:replace="'Hello World!'" /> + <tal:div content="'Hello World!'" /> + <tal:multiple repeat="i range(3)" replace="i" /> + <tal:div condition="True">True</tal:div> +</div> diff --git 
a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/031.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/031.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/031.xml @@ -0,0 +1,5 @@ +<?xml version='1.0' encoding="UTF-8"?> +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/032-master-template.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/032-master-template.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/032-master-template.pt @@ -0,0 +1,20 @@ +<html i18n:domain="master" metal:define-macro="main" tal:define="content nothing"> + <head> + <title metal:define-slot="title" + metal:define-macro="title" + tal:define="has_title exists: title" + tal:content="title if has_title else default">Master template + + +
+ + + +
+ + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/032.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/032.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/032.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/033-use-macro-trivial.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/033-use-macro-trivial.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/033-use-macro-trivial.pt @@ -0,0 +1,1 @@ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/033.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/033.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/033.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/034-use-template-as-macro.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/034-use-template-as-macro.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/034-use-template-as-macro.pt @@ -0,0 +1,1 @@ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/034.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/034.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/034.xml @@ -0,0 +1,4 @@ + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/035-use-macro-with-fill-slot.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/035-use-macro-with-fill-slot.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/035-use-macro-with-fill-slot.pt @@ -0,0 +1,5 @@ + + + ${kind} title + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/035.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/035.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/035.xml @@ -0,0 +1,4 @@ + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/036-use-macro-inherits-dynamic-scope.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/036-use-macro-inherits-dynamic-scope.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/036-use-macro-inherits-dynamic-scope.pt @@ -0,0 +1,2 @@ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/036.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/036.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/036.xml @@ -0,0 +1,5 @@ + +]> + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/037-use-macro-local-variable-scope.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/037-use-macro-local-variable-scope.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/037-use-macro-local-variable-scope.pt @@ -0,0 +1,5 @@ + + + ok + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/037.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/037.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/037.xml @@ -0,0 +1,6 @@ + +]> + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/038-use-macro-globals.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/038-use-macro-globals.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/038-use-macro-globals.pt @@ -0,0 +1,6 @@ + + + + ok + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/038.xml 
b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/038.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/038.xml @@ -0,0 +1,6 @@ + + +]> + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/039-globals.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/039-globals.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/039-globals.pt @@ -0,0 +1,1 @@ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/039.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/039.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/039.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/040-macro-using-template-symbol.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/040-macro-using-template-symbol.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/040-macro-using-template-symbol.pt @@ -0,0 +1,20 @@ + + + + + ${foo} +
+ +
+ + + +
+ diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/040.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/040.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/040.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/041-translate-nested-names.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/041-translate-nested-names.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/041-translate-nested-names.pt @@ -0,0 +1,22 @@ + + +
+ Hello + + world! + +
+
+ Hello + + world! + +
+
+ Goodbye + + world! + +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/041.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/041.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/041.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/042-use-macro-fill-footer.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/042-use-macro-fill-footer.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/042-use-macro-fill-footer.pt @@ -0,0 +1,3 @@ + + New footer + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/042.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/042.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/042.xml @@ -0,0 +1,4 @@ + +]> +A diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/043-macro-nested-dynamic-vars.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/043-macro-nested-dynamic-vars.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/043-macro-nested-dynamic-vars.pt @@ -0,0 +1,19 @@ + + + + + + + + + + ${title} + + + + +
+ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/043.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/043.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/043.xml @@ -0,0 +1,6 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/044-tuple-define.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/044-tuple-define.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/044-tuple-define.pt @@ -0,0 +1,5 @@ + + + ${a}, ${b} + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/044.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/044.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/044.xml @@ -0,0 +1,10 @@ + + + +]> + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/045-namespaces.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/045-namespaces.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/045-namespaces.pt @@ -0,0 +1,13 @@ + + +]> + + + ZZZ YYY XXX + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/045.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/045.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/045.xml @@ -0,0 +1,6 @@ + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/046-extend-macro.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/046-extend-macro.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/046-extend-macro.pt @@ -0,0 +1,6 @@ + + + New footer + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/046.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/046.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/046.xml @@ -0,0 +1,6 @@ + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/047-use-extended-macro.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/047-use-extended-macro.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/047-use-extended-macro.pt @@ -0,0 +1,3 @@ + + Extended + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/047.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/047.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/047.xml @@ -0,0 +1,5 @@ + +]> +X +Y diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/048-use-extended-macro-fill-original.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/048-use-extended-macro-fill-original.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/048-use-extended-macro-fill-original.pt @@ -0,0 +1,5 @@ + + + Extended footer + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/048.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/048.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/048.xml @@ -0,0 +1,4 @@ + +]> +] diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/049-entities-in-attributes.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/049-entities-in-attributes.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/049-entities-in-attributes.pt @@ -0,0 +1,11 @@ + + +
+    
+    
+  
+  
+    
+  
+
\ No newline at end of file
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/058.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/058.xml
new file mode 100644
--- /dev/null
+++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/058.xml
@@ -0,0 +1,5 @@
+
+
+]>
+
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/059-embedded-javascript.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/059-embedded-javascript.pt
new file mode 100644
--- /dev/null
+++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/059-embedded-javascript.pt
@@ -0,0 +1,6 @@
+
+  
+    test
+    test
+  
+
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/059.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/059.xml
new file mode 100644
--- /dev/null
+++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/059.xml
@@ -0,0 +1,10 @@
+
+
+
+]>
+
+
+
+
+
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/060-macro-with-multiple-same-slots.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/060-macro-with-multiple-same-slots.pt
new file mode 100644
--- /dev/null
+++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/060-macro-with-multiple-same-slots.pt
@@ -0,0 +1,8 @@
+
+  
+    <metal:title define-slot="title">Untitled</metal:title>
+  
+  
+    <metal:title define-slot="title">Untitled</metal:title>
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/060.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/060.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/060.xml @@ -0,0 +1,4 @@ + +]> +X Y diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/061-fill-one-slot-but-two-defined.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/061-fill-one-slot-but-two-defined.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/061-fill-one-slot-but-two-defined.pt @@ -0,0 +1,3 @@ + + My document + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/061.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/061.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/061.xml @@ -0,0 +1,4 @@ + +]> +£ diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/062-comments-and-expressions.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/062-comments-and-expressions.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/062-comments-and-expressions.pt @@ -0,0 +1,27 @@ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ + + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/062.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/062.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/062.xml @@ -0,0 +1,4 @@ + +]> +เจม?????? diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/063-continuation.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/063-continuation.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/063-continuation.pt @@ -0,0 +1,4 @@ +
+ ${foo} +
\ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/063.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/063.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/063.xml @@ -0,0 +1,4 @@ + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/064-tags-and-special-characters.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/064-tags-and-special-characters.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/064-tags-and-special-characters.pt @@ -0,0 +1,4 @@ + +
+
+
\ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/064.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/064.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/064.xml @@ -0,0 +1,4 @@ + +]> +𐀀􏿽 diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/065-use-macro-in-fill.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/065-use-macro-in-fill.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/065-use-macro-in-fill.pt @@ -0,0 +1,6 @@ + + + <div metal:fill-slot="content">Content</div> +</html> \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/065.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/065.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/065.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ENTITY e "<"> +<!ELEMENT doc (#PCDATA)> +]> +<doc></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/066-load-expression.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/066-load-expression.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/066-load-expression.pt @@ -0,0 +1,1 @@ +<html tal:define="hello_world load: hello_world.pt" metal:use-macro="hello_world" /> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/066.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/066.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/066.xml @@ -0,0 +1,7 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ATTLIST doc a1 CDATA #IMPLIED> +<!-- 34 is double quote --> +<!ENTITY e1 """> +]> +<doc a1="&e1;"></doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/067-attribute-decode.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/067-attribute-decode.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/067-attribute-decode.pt @@ -0,0 +1,6 @@ +<html> + <body> + <img src="#" tal:attributes="class 1 > 0 and 'up' or 0 < 1 and 'down';" /> + <img src="#" tal:attributes="class 0 > 1 and 'up' or 0 < 1 and 'down';" /> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/067.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/067.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/067.xml @@ -0,0 +1,4 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +]> +<doc> </doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/068-less-than-greater-than-in-attributes.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/068-less-than-greater-than-in-attributes.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/068-less-than-greater-than-in-attributes.pt @@ -0,0 +1,8 @@ +<html> + <body> + <span tal:content="string:0 < 1 or 0 > 1" /> + <span tal:content="structure string:0 < 1 or 0 > 1" /> + <span class="0 < 1 or 0 > 1" /> + <span>0 < 1 or 0 > 1</span> + </body> +</html> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/068.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/068.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/068.xml @@ -0,0 +1,5 @@ +<!DOCTYPE doc [ +<!ELEMENT doc (#PCDATA)> +<!ENTITY e " "> +]> +<doc>&e;</doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/069-translation-domain-and-macro.pt 
b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/069-translation-domain-and-macro.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/069-translation-domain-and-macro.pt @@ -0,0 +1,3 @@ +<html metal:use-macro="load('032-master-template.pt').macros['main']"> + <title metal:fill-slot="title" i18n:domain="test" i18n:translate="title">Title + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/069.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/069.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/069.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/070-translation-domain-and-use-macro.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/070-translation-domain-and-use-macro.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/070-translation-domain-and-use-macro.pt @@ -0,0 +1,3 @@ + + Title + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/070.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/070.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/070.xml @@ -0,0 +1,5 @@ +"> +%e; +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/071-html-attribute-defaults.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/071-html-attribute-defaults.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/071-html-attribute-defaults.pt @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/071.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/071.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/071.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/072-repeat-interpolation.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/072-repeat-interpolation.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/072-repeat-interpolation.pt @@ -0,0 +1,13 @@ + + +
    +
  • ${i}
  • +
+
    +
  • ${i}
  • +
+
    +
  • ${i}
  • +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/072.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/072.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/072.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/073-utf8-encoded.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/073-utf8-encoded.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/073-utf8-encoded.pt @@ -0,0 +1,5 @@ + + +${'my title'} ??? ${'my site'} + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/073.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/073.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/073.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/074-encoded-template.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/074-encoded-template.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/074-encoded-template.pt @@ -0,0 +1,5 @@ + + +${'my title'} ? ${'my site'} + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/074.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/074.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/074.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/075-nested-macros.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/075-nested-macros.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/075-nested-macros.pt @@ -0,0 +1,11 @@ + + + + + + foo + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/075.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/075.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/075.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/076-nested-macro-override.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/076-nested-macro-override.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/076-nested-macro-override.pt @@ -0,0 +1,3 @@ + + bar + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/076.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/076.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/076.xml @@ -0,0 +1,7 @@ + + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/077-i18n-attributes.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/077-i18n-attributes.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/077-i18n-attributes.pt @@ -0,0 +1,1 @@ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/077.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/077.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/077.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/078-tags-and-newlines.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/078-tags-and-newlines.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/078-tags-and-newlines.pt @@ -0,0 +1,23 @@ + + + + + + , + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/078.xml 
b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/078.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/078.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/079-implicit-i18n.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/079-implicit-i18n.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/079-implicit-i18n.pt @@ -0,0 +1,16 @@ + + + Welcome + + +

Welcome

+ An edge case: ${. + Site logo + Site logo +
+ foo. +
+ bar. +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/079.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/079.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/079.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/080-xmlns-namespace-on-tal.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/080-xmlns-namespace-on-tal.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/080-xmlns-namespace-on-tal.pt @@ -0,0 +1,6 @@ + + Hello world + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/080.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/080.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/080.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/081-load-spec.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/081-load-spec.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/081-load-spec.pt @@ -0,0 +1,1 @@ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/081.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/081.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/081.xml @@ -0,0 +1,7 @@ + + + + +]> +
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/082-load-spec-computed.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/082-load-spec-computed.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/082-load-spec-computed.pt @@ -0,0 +1,1 @@ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/082.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/082.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/082.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/083-template-dict-to-macro.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/083-template-dict-to-macro.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/083-template-dict-to-macro.pt @@ -0,0 +1,2 @@ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/083.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/083.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/083.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/084-interpolation-in-cdata.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/084-interpolation-in-cdata.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/084-interpolation-in-cdata.pt @@ -0,0 +1,9 @@ + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/084.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/084.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/084.xml @@ -0,0 +1,1 @@ +]> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/085-nested-translation.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/085-nested-translation.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/085-nested-translation.pt @@ -0,0 +1,11 @@ + + + Welcome + + +

Welcome

+

+ Click here to continue. +

+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/085.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/085.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/085.xml @@ -0,0 +1,6 @@ + +"> + +]> +&e; diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/086-self-closing.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/086-self-closing.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/086-self-closing.pt @@ -0,0 +1,10 @@ + + +
+
+ Chart +
+
+
+ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/086.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/086.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/086.xml @@ -0,0 +1,6 @@ + + +"> +]> +&e; diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/087-code-blocks.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/087-code-blocks.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/087-code-blocks.pt @@ -0,0 +1,28 @@ + + +
    +
  • +
+ + + +
    +
  • +
+ +
+ + Please input a number from the range ${", ".join(numbers)}. +
+ +
+ + 41 + 1 = ${function(41)}. +
\ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/087.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/087.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/087.xml @@ -0,0 +1,6 @@ + + + +]> +&e; diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/088-python-newlines.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/088-python-newlines.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/088-python-newlines.pt @@ -0,0 +1,2 @@ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/088.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/088.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/088.xml @@ -0,0 +1,5 @@ + +"> +]> +&e; diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/089.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/089.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/089.xml @@ -0,0 +1,5 @@ + + +]> +&e; diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/090.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/090.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/090.xml @@ -0,0 +1,7 @@ + + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/091.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/091.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/091.xml @@ -0,0 +1,7 @@ + + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/092.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/092.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/092.xml @@ -0,0 +1,10 @@ + + +]> + + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/093.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/093.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/093.xml @@ -0,0 +1,5 @@ + +]> + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/094.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/094.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/094.xml @@ -0,0 +1,6 @@ + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/095.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/095.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/095.xml @@ -0,0 +1,6 @@ + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/096.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/096.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/096.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/097.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/097.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/097.xml @@ -0,0 +1,8 @@ + + + +%e; + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/098.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/098.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/098.xml @@ -0,0 +1,5 @@ + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/099.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/099.xml new file mode 100644 --- /dev/null +++ 
b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/099.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/100.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/100.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/100.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/101-unclosed-tags.html b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/101-unclosed-tags.html new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/101-unclosed-tags.html @@ -0,0 +1,5 @@ + + +



Hello world

+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/101.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/101.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/101.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/102-unquoted-attributes.html b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/102-unquoted-attributes.html new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/102-unquoted-attributes.html @@ -0,0 +1,5 @@ + + +

Hello world

+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/102.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/102.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/102.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/103-simple-attribute.html b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/103-simple-attribute.html new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/103-simple-attribute.html @@ -0,0 +1,8 @@ + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/103.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/103.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/103.xml @@ -0,0 +1,4 @@ + +]> +<doc> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/104.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/104.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/104.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/105.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/105.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/105.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/106.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/106.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/106.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/107.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/107.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/107.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/108.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/108.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/108.xml @@ -0,0 +1,7 @@ + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/109.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/109.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/109.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/110.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/110.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/110.xml @@ -0,0 +1,6 @@ + + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/111.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/111.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/111.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/112.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/112.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/112.xml @@ -0,0 +1,5 @@ + + +]> +
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/113.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/113.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/113.xml @@ -0,0 +1,5 @@ + + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/114.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/114.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/114.xml @@ -0,0 +1,5 @@ + +"> +]> +&e; diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/115.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/115.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/115.xml @@ -0,0 +1,6 @@ + + + +]> +&e1; diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/116.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/116.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/116.xml @@ -0,0 +1,5 @@ + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/117.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/117.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/117.xml @@ -0,0 +1,5 @@ + + +]> +] diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/118.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/118.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/118.xml @@ -0,0 +1,5 @@ + + +]> +] diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/119.xml b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/119.xml new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/119.xml @@ -0,0 +1,4 @@ + +]> + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/greeting.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/greeting.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/greeting.pt @@ -0,0 +1,1 @@ +
Hello, ${name | 'undefined'}.
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/hello_world.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/hello_world.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/hello_world.pt @@ -0,0 +1,5 @@ + + + ${'Hello world!'} + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/hello_world.txt b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/hello_world.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/inputs/hello_world.txt @@ -0,0 +1,1 @@ +${'Hello world!'} diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.html b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.html new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.html @@ -0,0 +1,7 @@ + + + Hello world! + Hello world! + + Goodbye world! + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.pt @@ -0,0 +1,9 @@ + + + Hello world! + + + + ok + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.txt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/001.txt @@ -0,0 +1,1 @@ +<&> diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/002.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/002.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/002.pt @@ -0,0 +1,13 @@ + + +
+ Hello! + Hello. +
+
+ Goodbye! + Goodbye. +
+ ok + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/003.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/003.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/003.pt @@ -0,0 +1,17 @@ + + +
Hello world!
+
Hello world!
1 + 2
Hello world!
+
Hello world!
3 +
Hello world!
5 + 6
Hello world!
+
1
+
1.0
+
True
+
False
+
0
+
+ <div>Hello world!</div> + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/004.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/004.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/004.pt @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/005.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/005.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/005.pt @@ -0,0 +1,12 @@ + + + + + Default + True + False + + Computed default + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/006.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/006.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/006.pt @@ -0,0 +1,9 @@ + + + copyright (c) 2010 + copyright (c) 2010 + copyright (c) 2010 + $ignored + <type 'str'> + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/007.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/007.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/007.pt @@ -0,0 +1,14 @@ + + + Hello world! +
Hello world!
+
Hello world!
+ <type 'str'> + && + + Hello world + $leftalone +
+
Hello world
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/008.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/008.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/008.pt @@ -0,0 +1,11 @@ + + + +
+ static +
+
+ nothing +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/009.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/009.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/009.pt @@ -0,0 +1,5 @@ + + +
Hello world!
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/010.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/010.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/010.pt @@ -0,0 +1,9 @@ + + +
1 < 2
+
2 < 3, 2&3, 2<3, 2>3
+
3 < 4
+
4 < 5
+
Hello world!
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/011-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/011-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/011-en.pt @@ -0,0 +1,9 @@ + + +
Message ('message' translation into 'en')
+
Message ('message' translation into 'en')
+
Message ('message' translation into 'en')
+
Message ('message' translation into 'en')
+ Message ('message' translation into 'en') + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/011.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/011.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/011.pt @@ -0,0 +1,9 @@ + + +
Message
+
Message
+
Message
+
Message
+ Message + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/012-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/012-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/012-en.pt @@ -0,0 +1,9 @@ + + +
Hello world! ('Hello world!' translation into 'en')
+
Hello world! ('hello_world' translation into 'en')
+
Hello world! ('Hello world!' translation into 'en')
+
Hello world! Goodbye planet! ('Hello ${first}! Goodbye ${second}!' translation into 'en')
+
Hello world! Goodbye planet! ('hello_goodbye' translation into 'en')
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/012.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/012.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/012.pt @@ -0,0 +1,9 @@ + + +
Hello world!
+
Hello world!
+
Hello world!
+
Hello world! Goodbye planet!
+
Hello world! Goodbye planet!
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/013.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/013.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/013.pt @@ -0,0 +1,22 @@ + + + + + + + + + + + +
+ [1,1] + + [1,2] +
+ [2,1] + + [2,2] +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/014.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/014.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/014.pt @@ -0,0 +1,12 @@ + + + + [3,3] + [3,4] + + + [4,3] + [4,4] + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/015-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/015-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/015-en.pt @@ -0,0 +1,5 @@ + + +
Price: Per kilo 12.5 ('Per kilo ${amount}' translation into 'en') ('Price: ${price}' translation into 'en')
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/015.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/015.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/015.pt @@ -0,0 +1,5 @@ + + +
Price: Per kilo 12.5
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/016-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/016-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/016-en.pt @@ -0,0 +1,9 @@ + + +
Hello world! ('Hello world!' translation into 'en')
+ Hello world! ('Hello world!' translation into 'en') + Hello world! ('hello_world' translation into 'en') + Hello world! ('Hello world!' translation into 'en') + Hello world! ('hello_world' translation into 'en') + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/016.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/016.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/016.pt @@ -0,0 +1,9 @@ + + +
Hello world!
+ Hello world! + Hello world! + Hello world! + Hello world! + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/017.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/017.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/017.pt @@ -0,0 +1,12 @@ + + + Hello world! + 1 + Hello world! + 23 + 4Hello world! +
Hello world!
+ Hello world! + Hello world! + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/018-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/018-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/018-en.pt @@ -0,0 +1,3 @@ +
+ october ('october' translation into 'en') 1982 ('1982' translation into 'en') ('${monthname} ${year}' translation into 'en') +
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/018.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/018.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/018.pt @@ -0,0 +1,3 @@ +
+ october 1982 +
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/019.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/019.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/019.pt @@ -0,0 +1,13 @@ + + + Hello world! + Hello world!1 + 2Hello world! + Hello world!3 + Hello world!5 + 6Hello world! + 1 + 1.0 + True + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/020.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/020.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/020.pt @@ -0,0 +1,8 @@ + + +
+
NameError thrown at 5:24.
+
+
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/021-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/021-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/021-en.pt @@ -0,0 +1,12 @@ + + +
Hello world! ('Hello world!' translation into 'en' with domain 'new')
+
Hello world! ('Hello world!' translation into 'en' with domain 'old')
+
+ Hello world! +
+
+ Hello world! +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/021.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/021.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/021.pt @@ -0,0 +1,12 @@ + + +
Hello world!
+
Hello world!
+
+ Hello world! +
+
+ Hello world! +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/022.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/022.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/022.pt @@ -0,0 +1,13 @@ + + +
+ + ok + ok +
+
+ + ok +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/023.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/023.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/023.pt @@ -0,0 +1,6 @@ + + + + ok + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/024.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/024.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/024.pt @@ -0,0 +1,14 @@ + + + + + first + + second + + + ok + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/025.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/025.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/025.pt @@ -0,0 +1,23 @@ + + +
    +
  • 1
  • +
  • 2
  • +
  • 3
  • +
  • 1
  • +
  • 2
  • +
  • 3
  • + + + 1, + + + 2, + + + 3 + + . +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/026.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/026.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/026.pt @@ -0,0 +1,17 @@ +
+
    +
  • 0
  • +
  • 1
  • +
  • 2
  • +
+
    +
  • 0
  • +
  • 1
  • +
  • 2
  • +
+
    +
  • even
  • +
  • odd
  • +
  • even
  • +
+
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/027.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/027.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/027.pt @@ -0,0 +1,7 @@ +
+ abcghi + Hello World! + Hello World! +
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/028.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/028.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/028.pt @@ -0,0 +1,5 @@ +
+ + + +
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/029.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/029.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/029.pt @@ -0,0 +1,3 @@ +
+ +
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/030.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/030.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/030.pt @@ -0,0 +1,10 @@ + + +
+ 1, 1, 2 +
+
+ 2, 3, 4 +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/031.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/031.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/031.pt @@ -0,0 +1,9 @@ +
+ Hello World! + Hello World! + Hello World! + 0 + 1 + 2 + True +
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/032.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/032.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/032.pt @@ -0,0 +1,15 @@ + + + Master template + + +
+ + + +
+ + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/033.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/033.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/033.pt @@ -0,0 +1,15 @@ + + + Master template + + +
+ + + +
+ + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/034.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/034.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/034.pt @@ -0,0 +1,15 @@ + + + Master template + + +
+ + + +
+ + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/035.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/035.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/035.pt @@ -0,0 +1,17 @@ + + + + New title + + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/036.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/036.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/036.pt @@ -0,0 +1,15 @@ + + + New title + + +
+ + + +
+ + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/037.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/037.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/037.pt @@ -0,0 +1,15 @@ + + + Master template + + +
+ + ok + +
+ + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/038.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/038.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/038.pt @@ -0,0 +1,6 @@ + + + + ok + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/039.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/039.pt new file mode 100644 diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/040.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/040.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/040.pt @@ -0,0 +1,15 @@ + + + + + foo + + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/041.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/041.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/041.pt @@ -0,0 +1,7 @@ + + +
Hello world!
+
Hello world!
+
Goodbye
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/042.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/042.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/042.pt @@ -0,0 +1,15 @@ + + + Master template + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/043.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/043.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/043.pt @@ -0,0 +1,11 @@ + + + + + + + My title + + + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/044.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/044.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/044.pt @@ -0,0 +1,5 @@ + + + a, b + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/045.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/045.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/045.pt @@ -0,0 +1,12 @@ + + +]> + + + ZZZ YYY XXX + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/046.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/046.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/046.pt @@ -0,0 +1,17 @@ + + + Master template + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/047.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/047.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/047.pt @@ -0,0 +1,17 @@ + + + Master template + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/048.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/048.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/048.pt @@ -0,0 +1,17 @@ + + + Master template + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/049.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/049.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/049.pt @@ -0,0 +1,11 @@ + + +
amp=&amp; lt=&lt;
+
amp=& lt=<
+ + + + + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/059.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/059.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/059.pt @@ -0,0 +1,6 @@ + + + test + test + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/060.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/060.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/060.pt @@ -0,0 +1,8 @@ + + + Untitled + + +

Untitled

+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/061.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/061.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/061.pt @@ -0,0 +1,8 @@ + + + My document + + +

My document

+ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/062.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/062.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/062.pt @@ -0,0 +1,27 @@ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ + + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/063.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/063.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/063.pt @@ -0,0 +1,3 @@ +
+ 2 +
\ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/064.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/064.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/064.pt @@ -0,0 +1,3 @@ + +
+
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/065.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/065.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/065.pt @@ -0,0 +1,13 @@ + + + Title + + +
+
Content
+
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/066.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/066.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/066.pt @@ -0,0 +1,5 @@ + + + Hello world! + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/067.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/067.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/067.pt @@ -0,0 +1,6 @@ + + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/068.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/068.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/068.pt @@ -0,0 +1,8 @@ + + + 0 < 1 or 0 > 1 + 0 < 1 or 0 > 1 + + 0 < 1 or 0 > 1 + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/069-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/069-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/069-en.pt @@ -0,0 +1,15 @@ + + + Title ('title' translation into 'en' with domain 'test') + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/069.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/069.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/069.pt @@ -0,0 +1,15 @@ + + + Title + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/070-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/070-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/070-en.pt @@ -0,0 +1,15 @@ + + + Title ('title' translation into 'en' with domain 'test') + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/070.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/070.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/070.pt @@ -0,0 +1,15 @@ + + + Title + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/071.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/071.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/071.pt @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/072.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/072.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/072.pt @@ -0,0 +1,19 @@ + + +
    +
  • 1
  • +
  • 2
  • +
  • 3
  • +
+
    +
  • 1
  • +
  • 2
  • +
  • 3
  • +
+
    +
  • 1
  • +
  • 2
  • +
  • 3
  • +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/073.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/073.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/073.pt @@ -0,0 +1,5 @@ + + +my title ??? my site + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/074.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/074.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/074.pt @@ -0,0 +1,5 @@ + + +my title ? my site + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/075.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/075.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/075.pt @@ -0,0 +1,19 @@ + + + + Master template + + +
+ + + foo + + +
+ + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/076.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/076.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/076.pt @@ -0,0 +1,17 @@ + + + + Master template + + +
+ + bar + +
+ + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/077-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/077-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/077-en.pt @@ -0,0 +1,1 @@ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/077.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/077.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/077.pt @@ -0,0 +1,1 @@ + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/078.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/078.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/078.pt @@ -0,0 +1,11 @@ + + + + + + 1, + 2, + 3 + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/079-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/079-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/079-en.pt @@ -0,0 +1,16 @@ + + + Welcome ('Welcome' translation into 'en') + + +

Welcome ('Welcome' translation into 'en')

+ An edge case: ${. ('An edge case: ${.' translation into 'en') + Site logo ('Site logo' translation into 'en') + Site logo ('Site logo' translation into 'en') +
+ foo. ('foo.' translation into 'en') +
+ bar. ('bar.' translation into 'en') +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/079.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/079.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/079.pt @@ -0,0 +1,16 @@ + + + Welcome + + +

Welcome

+ An edge case: ${. + Site logo + Site logo +
+ foo. +
+ bar. +
+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/080.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/080.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/080.pt @@ -0,0 +1,3 @@ + + Hello world + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/081.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/081.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/081.pt @@ -0,0 +1,5 @@ + + + Hello world! + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/082.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/082.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/082.pt @@ -0,0 +1,5 @@ + + + Hello world! + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/083.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/083.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/083.pt @@ -0,0 +1,15 @@ + + + Master template + + +
+ + + +
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/084.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/084.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/084.pt @@ -0,0 +1,9 @@ + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/085-en.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/085-en.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/085-en.pt @@ -0,0 +1,9 @@ + + + Welcome + + +

Welcome

+

Click here ('Click here' translation into 'en' with domain 'new') to continue. ('${click_here} to continue.' translation into 'en' with domain 'new')

+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/085.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/085.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/085.pt @@ -0,0 +1,9 @@ + + + Welcome + + +

Welcome

+

Click here to continue.

+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/086.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/086.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/086.pt @@ -0,0 +1,18 @@ + + + Master template + + +
+
+
+ Chart +
+
+
+
+ + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/087.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/087.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/087.pt @@ -0,0 +1,25 @@ + + +
    +
  • 1
  • +
  • 2
  • +
  • 3
  • +
+ + + +
    +
  • 5
  • +
  • 7
  • +
  • 9
  • +
+ +
+ + Please input a number from the range 1, 2, 3, 4, 5, 6, 7, 8, 9. +
+ +
+ + 41 + 1 = 42. +
\ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/088.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/088.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/088.pt @@ -0,0 +1,1 @@ +a, b, c diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/101.html b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/101.html new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/101.html @@ -0,0 +1,5 @@ + + +



Hello world

+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/102.html b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/102.html new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/102.html @@ -0,0 +1,5 @@ + + +

Hello world

+ + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/103.html b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/103.html new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/103.html @@ -0,0 +1,8 @@ + + + + + diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/greeting.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/greeting.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/greeting.pt @@ -0,0 +1,1 @@ +
Hello, undefined.
diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/hello_world.pt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/hello_world.pt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/hello_world.pt @@ -0,0 +1,5 @@ + + + Hello world! + + \ No newline at end of file diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/hello_world.txt b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/hello_world.txt new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/outputs/hello_world.txt @@ -0,0 +1,1 @@ +Hello world! diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/test_doctests.py b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_doctests.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_doctests.py @@ -0,0 +1,40 @@ +import unittest +import doctest + +OPTIONFLAGS = (doctest.ELLIPSIS | + doctest.REPORT_ONLY_FIRST_FAILURE) + + +class DoctestCase(unittest.TestCase): + def __new__(self, test): + return getattr(self, test)() + + @classmethod + def test_tal(cls): + from chameleon import tal + return doctest.DocTestSuite( + tal, optionflags=OPTIONFLAGS) + + @classmethod + def test_tales(cls): + from chameleon import tales + return doctest.DocTestSuite( + tales, optionflags=OPTIONFLAGS) + + @classmethod + def test_utils(cls): + from chameleon import utils + return doctest.DocTestSuite( + utils, optionflags=OPTIONFLAGS) + + @classmethod + def test_exc(cls): + from chameleon import exc + return doctest.DocTestSuite( + exc, optionflags=OPTIONFLAGS) + + @classmethod + def test_compiler(cls): + from chameleon import compiler + return doctest.DocTestSuite( + compiler, optionflags=OPTIONFLAGS) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/test_loader.py b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_loader.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_loader.py @@ -0,0 +1,79 @@ +import unittest + + +class LoadTests: + def _makeOne(self, search_path=None, **kwargs): + klass = self._getTargetClass() + return klass(search_path, **kwargs) + + def _getTargetClass(self): + from chameleon.loader import TemplateLoader + return TemplateLoader + + def test_load_relative(self): + import os + here = os.path.join(os.path.dirname(__file__), "inputs") + loader = self._makeOne(search_path=[here]) + result = self._load(loader, 'hello_world.pt') + self.assertEqual(result.filename, os.path.join(here, 'hello_world.pt')) + + def test_consecutive_loads(self): + import os + here = os.path.join(os.path.dirname(__file__), "inputs") + loader = self._makeOne(search_path=[here]) + + self.assertTrue( + self._load(loader, 'hello_world.pt') is \ + self._load(loader, 'hello_world.pt')) + + def test_load_relative_badpath_in_searchpath(self): + import os + here = os.path.join(os.path.dirname(__file__), "inputs") + loader = self._makeOne(search_path=[os.path.join(here, 'none'), here]) + result = self._load(loader, 'hello_world.pt') + self.assertEqual(result.filename, os.path.join(here, 'hello_world.pt')) + + def test_load_abs(self): + import os + here = os.path.join(os.path.dirname(__file__), "inputs") + loader = self._makeOne() + abs = os.path.join(here, 'hello_world.pt') + result = self._load(loader, abs) + self.assertEqual(result.filename, abs) + + +class LoadPageTests(unittest.TestCase, LoadTests): + def _load(self, loader, filename): + from chameleon.zpt import template + return loader.load(filename, template.PageTemplateFile) + + +class 
ZPTLoadTests(unittest.TestCase): + def _makeOne(self, *args, **kwargs): + import os + here = os.path.join(os.path.dirname(__file__), "inputs") + from chameleon.zpt import loader + return loader.TemplateLoader(here, **kwargs) + + def test_load_xml(self): + loader = self._makeOne() + template = loader.load("hello_world.pt", "xml") + from chameleon.zpt.template import PageTemplateFile + self.assertTrue(isinstance(template, PageTemplateFile)) + + def test_load_text(self): + loader = self._makeOne() + template = loader.load("hello_world.txt", "text") + from chameleon.zpt.template import PageTextTemplateFile + self.assertTrue(isinstance(template, PageTextTemplateFile)) + + def test_load_getitem_gets_xml_file(self): + loader = self._makeOne() + template = loader["hello_world.pt"] + from chameleon.zpt.template import PageTemplateFile + self.assertTrue(isinstance(template, PageTemplateFile)) + + +def test_suite(): + import sys + return unittest.findTestCases(sys.modules[__name__]) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/test_parser.py b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_parser.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_parser.py @@ -0,0 +1,92 @@ +from __future__ import with_statement + +import sys + +from unittest import TestCase + +from ..namespaces import XML_NS +from ..namespaces import XMLNS_NS +from ..namespaces import PY_NS + + +class ParserTest(TestCase): + def test_sample_files(self): + import os + import traceback + path = os.path.join(os.path.dirname(__file__), "inputs") + for filename in os.listdir(path): + if not filename.endswith('.html'): + continue + + with open(os.path.join(path, filename), 'rb') as f: + source = f.read() + + from ..utils import read_encoded + try: + want = read_encoded(source) + except UnicodeDecodeError: + exc = sys.exc_info()[1] + self.fail("%s - %s" % (exc, filename)) + + from ..tokenize import iter_xml + from ..parser import ElementParser + try: + tokens = iter_xml(want) + parser = ElementParser(tokens, { + 'xmlns': XMLNS_NS, + 'xml': XML_NS, + 'py': PY_NS, + }) + elements = tuple(parser) + except: + self.fail(traceback.format_exc()) + + output = [] + + def render(kind, args): + if kind == 'element': + # start tag + tag, end, children = args + output.append("%(prefix)s%(name)s" % tag) + + for attr in tag['attrs']: + output.append( + "%(space)s%(name)s%(eq)s%(quote)s%(value)s%(quote)s" % \ + attr + ) + + output.append("%(suffix)s" % tag) + + # children + for item in children: + render(*item) + + # end tag + output.append( + "%(prefix)s%(name)s%(space)s%(suffix)s" % end + ) + elif kind == 'text': + text = args[0] + output.append(text) + elif kind == 'start_tag': + node = args[0] + output.append( + "%(prefix)s%(name)s%(space)s%(suffix)s" % node + ) + else: + raise RuntimeError("Not implemented: %s." 
% kind) + + for kind, args in elements: + render(kind, args) + + got = "".join(output) + + from doctest import OutputChecker + checker = OutputChecker() + + if checker.check_output(want, got, 0) is False: + from doctest import Example + example = Example(f.name, want) + diff = checker.output_difference( + example, got, 0) + self.fail("(%s) - \n%s" % (f.name, diff)) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/test_sniffing.py b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_sniffing.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_sniffing.py @@ -0,0 +1,124 @@ +from __future__ import with_statement + +import os +import unittest +import tempfile +import shutil + +from chameleon.utils import unicode_string +from chameleon.utils import encode_string + + +class TypeSniffingTestCase(unittest.TestCase): + def setUp(self): + self.tempdir = tempfile.mkdtemp(prefix='chameleon-tests') + + def tearDown(self): + shutil.rmtree(self.tempdir) + + def _get_temporary_file(self): + filename = os.path.join(self.tempdir, 'template.py') + assert not os.path.exists(filename) + f = open(filename, 'w') + f.flush() + f.close() + return filename + + def get_template(self, text): + fn = self._get_temporary_file() + + with open(fn, 'wb') as tmpfile: + tmpfile.write(text) + + from chameleon.template import BaseTemplateFile + + class DummyTemplateFile(BaseTemplateFile): + def cook(self, body): + self.body = body + + template = DummyTemplateFile(fn) + template.cook_check() + return template + + def check_content_type(self, text, expected_type): + from chameleon.utils import read_bytes + content_type = read_bytes(text, 'ascii')[2] + self.assertEqual(content_type, expected_type) + + def test_xml_encoding(self): + from chameleon.utils import xml_prefixes + + document1 = unicode_string( + "" + ) + document2 = unicode_string( + "" + ) + + for bom, encoding in xml_prefixes: + try: + "".encode(encoding) + except LookupError: + # System does not support this encoding + continue + + self.check_content_type(document1.encode(encoding), "text/xml") + self.check_content_type(document2.encode(encoding), "text/xml") + + HTML_PUBLIC_ID = "-//W3C//DTD HTML 4.01 Transitional//EN" + HTML_SYSTEM_ID = "http://www.w3.org/TR/html4/loose.dtd" + + # Couldn't find the code that handles this... yet. + # def test_sniffer_html_ascii(self): + # self.check_content_type( + # "" + # % self.HTML_SYSTEM_ID, + # "text/html") + # self.check_content_type( + # "sample document", + # "text/html") + + # TODO: This reflects a case that simply isn't handled by the + # sniffer; there are many, but it gets it right more often than + # before. 
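    # Hedged aside, not part of the original module; the helper name below is
    # made up for illustration.  The sniffing exercised by the tests above
    # essentially matches a byte-order mark against a table of (BOM, codec)
    # pairs and decodes with the first codec whose BOM matches, e.g.:
    #
    #     import codecs
    #
    #     def sniff_decode(data):
    #         for bom, enc in ((codecs.BOM_UTF8, 'utf-8-sig'),
    #                          (codecs.BOM_UTF16_LE, 'utf-16-le'),
    #                          (codecs.BOM_UTF16_BE, 'utf-16-be')):
    #             if data.startswith(bom):
    #                 return data.decode(enc)
    #         return data.decode('ascii')  # no BOM: fall back to ASCII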
+ def donttest_sniffer_xml_simple(self): + self.check_content_type("", "text/xml") + + def test_html_default_encoding(self): + body = encode_string( + '' \ + '\xc3\x90\xc2\xa2\xc3\x90\xc2\xb5' \ + '\xc3\x91\xc2\x81\xc3\x91\xc2\x82' \ + '') + + template = self.get_template(body) + self.assertEqual(template.body, body.decode('utf-8')) + + def test_html_encoding_by_meta(self): + body = encode_string( + '' \ + '\xc3\x92\xc3\xa5\xc3\xb1\xc3\xb2' \ + '' \ + "") + + template = self.get_template(body) + self.assertEqual(template.body, body.decode('windows-1251')) + + def test_xhtml(self): + body = encode_string( + '' \ + '\xc3\x92\xc3\xa5\xc3\xb1\xc3\xb2' \ + '' \ + "") + + template = self.get_template(body) + self.assertEqual(template.body, body.decode('windows-1251')) + + +def test_suite(): + return unittest.makeSuite(TypeSniffingTestCase) + +if __name__ == "__main__": + unittest.main(defaultTest="test_suite") diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/test_templates.py b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_templates.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_templates.py @@ -0,0 +1,679 @@ +# -*- coding: utf-8 -*- + +from __future__ import with_statement + +import re +import os +import sys +import shutil +import tempfile + +from functools import wraps +from functools import partial + +try: + from unittest2 import TestCase +except ImportError: + from unittest import TestCase + + +from chameleon.utils import byte_string + + +class Message(object): + def __str__(self): + return "message" + + +class ImportTestCase(TestCase): + def test_pagetemplates(self): + from chameleon import PageTemplate + from chameleon import PageTemplateFile + from chameleon import PageTemplateLoader + + def test_pagetexttemplates(self): + from chameleon import PageTextTemplate + from chameleon import PageTextTemplateFile + + +class TemplateFileTestCase(TestCase): + @property + def _class(self): + from chameleon.template import BaseTemplateFile + + class TestTemplateFile(BaseTemplateFile): + cook_count = 0 + + def cook(self, body): + self.cook_count += 1 + self._cooked = True + + return TestTemplateFile + + def setUp(self): + self.tempdir = tempfile.mkdtemp(prefix='chameleon-tests') + + def tearDown(self): + shutil.rmtree(self.tempdir) + + def _get_temporary_file(self): + filename = os.path.join(self.tempdir, 'template.py') + assert not os.path.exists(filename) + f = open(filename, 'w') + f.flush() + f.close() + return filename + + def test_cook_check(self): + fn = self._get_temporary_file() + template = self._class(fn) + template.cook_check() + self.assertEqual(template.cook_count, 1) + + def test_auto_reload(self): + fn = self._get_temporary_file() + + # set time in past + os.utime(fn, (0, 0)) + + template = self._class(fn, auto_reload=True) + template.cook_check() + + # a second cook check makes no difference + template.cook_check() + self.assertEqual(template.cook_count, 1) + + # set current time on file + os.utime(fn, None) + + # file is reloaded + template.cook_check() + self.assertEqual(template.cook_count, 2) + + def test_relative_is_expanded_to_cwd(self): + template = self._class("___does_not_exist___") + try: + template.cook_check() + except IOError: + exc = sys.exc_info()[1] + self.assertEqual( + os.getcwd(), + os.path.dirname(exc.filename) + ) + else: + self.fail("Expected OSError.") + + +class RenderTestCase(TestCase): + root = os.path.dirname(__file__) + + def find_files(self, ext): + inputs = os.path.join(self.root, "inputs") + outputs = 
os.path.join(self.root, "outputs") + for filename in sorted(os.listdir(inputs)): + name, extension = os.path.splitext(filename) + if extension != ext: + continue + path = os.path.join(inputs, filename) + + # if there's no output file, treat document as static and + # expect intput equal to output + import glob + globbed = tuple(glob.iglob(os.path.join( + outputs, "%s*%s" % (name.split('-', 1)[0], ext)))) + + if not globbed: + self.fail("Missing output for: %s." % name) + + for output in globbed: + name, ext = os.path.splitext(output) + basename = os.path.basename(name) + if '-' in basename: + language = basename.split('-')[1] + else: + language = None + + yield path, output, language + + +class ZopePageTemplatesTest(RenderTestCase): + @property + def from_string(body): + from ..zpt.template import PageTemplate + return partial(PageTemplate, keep_source=True) + + @property + def from_file(body): + from ..zpt.template import PageTemplateFile + return partial(PageTemplateFile, keep_source=True) + + def template(body): + def decorator(func): + @wraps(func) + def wrapper(self): + template = self.from_string(body) + return func(self, template) + + return wrapper + return decorator + + def error(body): + def decorator(func): + @wraps(func) + def wrapper(self): + from chameleon.exc import TemplateError + try: + template = self.from_string(body) + except TemplateError: + exc = sys.exc_info()[1] + return func(self, body, exc) + else: + self.fail("Expected exception.") + + return wrapper + return decorator + + def test_syntax_error_in_strict_mode(self): + from chameleon.exc import ExpressionError + + self.assertRaises( + ExpressionError, + self.from_string, + """""", + strict=True + ) + + def test_syntax_error_in_non_strict_mode(self): + from chameleon.exc import ExpressionError + + body = """""" + template = self.from_string(body, strict=False) + + try: + template() + except ExpressionError: + exc = sys.exc_info()[1] + self.assertTrue(body[exc.offset:].startswith('bad ///')) + else: + self.fail("Expected exception") + + @error("""""") + def test_attributes_on_tal_tag_fails(self, body, exc): + self.assertTrue(body[exc.offset:].startswith('dummy')) + + @error("""""") + def test_i18n_attributes_with_non_identifiers(self, body, exc): + self.assertTrue(body[exc.offset:].startswith('foo,')) + + @error("""""") + def test_repeat_syntax_error_message(self, body, exc): + self.assertTrue(body[exc.offset:].startswith('key,value')) + + def test_encoded(self): + filename = '074-encoded-template.pt' + with open(os.path.join(self.root, 'inputs', filename), 'rb') as f: + body = f.read() + + self.from_string(body) + + def test_utf8_encoded(self): + filename = '073-utf8-encoded.pt' + with open(os.path.join(self.root, 'inputs', filename), 'rb') as f: + body = f.read() + + self.from_string(body) + + def test_unicode_decode_error(self): + template = self.from_file( + os.path.join(self.root, 'inputs', 'greeting.pt') + ) + + string = native = "the artist formerly known as ????????????" 
+ try: + string = string.decode('utf-8') + except AttributeError: + pass + + class name: + @staticmethod + def __html__(): + # This raises a decoding exception + string.encode('utf-8').decode('ascii') + + self.fail("Expected exception raised.") + + try: + template(name=name) + except UnicodeDecodeError: + exc = sys.exc_info()[1] + formatted = str(exc) + + # There's a marker under the expression that has the + # unicode decode error + self.assertTrue('^^^^^' in formatted) + self.assertTrue(native in formatted) + else: + self.fail("expected error") + + def test_custom_encoding_for_str_or_bytes_in_content(self): + string = '
????????${text}
' + try: + string = string.decode('utf-8') + except AttributeError: + pass + + template = self.from_string(string, encoding="windows-1251") + + text = '????????' + + try: + text = text.decode('utf-8') + except AttributeError: + pass + + rendered = template(text=text.encode('windows-1251')) + + self.assertEqual( + rendered, + string.replace('${text}', text) + ) + + def test_custom_encoding_for_str_or_bytes_in_attributes(self): + string = '' + try: + string = string.decode('utf-8') + except AttributeError: + pass + + template = self.from_string(string, encoding="windows-1251") + + text = '????????' + + try: + text = text.decode('utf-8') + except AttributeError: + pass + + rendered = template(text=text.encode('windows-1251')) + + self.assertEqual( + rendered, + string.replace('${text}', text) + ) + + def test_null_translate_function(self): + template = self.from_string('${test}', translate=None) + rendered = template(test=object()) + self.assertTrue('object' in rendered) + + def test_object_substitution_coerce_to_str(self): + template = self.from_string('${test}', translate=None) + + class dummy(object): + def __repr__(inst): + self.fail("call not expected") + + def __str__(inst): + return '' + + rendered = template(test=dummy()) + self.assertEqual(rendered, '<dummy>') + + def test_repr(self): + template = self.from_file( + os.path.join(self.root, 'inputs', 'hello_world.pt') + ) + self.assertTrue(template.filename in repr(template)) + + def test_underscore_variable(self): + template = self.from_string( + "
${_dummy}
" + ) + self.assertTrue(template(), "
foo
") + + def test_trim_attribute_space(self): + document = '''
''' + + result1 = self.from_string( + document)() + + result2 = self.from_string( + document, trim_attribute_space=True)() + + self.assertEqual(result1.count(" "), 49) + self.assertEqual(result2.count(" "), 4) + self.assertTrue(" />" in result1) + self.assertTrue(" />" in result2) + + def test_exception(self): + from traceback import format_exception_only + + template = self.from_string( + "
${dummy}
" + ) + try: + template() + except: + exc = sys.exc_info()[1] + formatted = str(exc) + self.assertFalse('NameError:' in formatted) + self.assertTrue('foo' in formatted) + self.assertTrue('(1:23)' in formatted) + + formatted_exc = "\n".join(format_exception_only(type(exc), exc)) + self.assertTrue('NameError: foo' in formatted_exc) + else: + self.fail("expected error") + + def test_create_formatted_exception(self): + from chameleon.utils import create_formatted_exception + + exc = create_formatted_exception(NameError('foo'), NameError, str) + self.assertEqual(exc.args, ('foo', )) + + class MyNameError(NameError): + def __init__(self, boo): + NameError.__init__(self, boo) + self.bar = boo + + exc = create_formatted_exception(MyNameError('foo'), MyNameError, str) + self.assertEqual(exc.args, ('foo', )) + self.assertEqual(exc.bar, 'foo') + + def test_create_formatted_exception_no_subclass(self): + from chameleon.utils import create_formatted_exception + + class DifficultMetaClass(type): + def __init__(self, class_name, bases, namespace): + if not bases == (BaseException, ): + raise TypeError(bases) + + Difficult = DifficultMetaClass('Difficult', (BaseException, ), {'args': ()}) + + exc = create_formatted_exception(Difficult(), Difficult, str) + self.assertEqual(exc.args, ()) + + def test_error_handler_makes_safe_copy(self): + calls = [] + + class TestException(Exception): + def __init__(self, *args, **kwargs): + calls.append((args, kwargs)) + + def _render(stream, econtext, rcontext): + exc = TestException('foo', bar='baz') + rcontext['__error__'] = ('expression', 1, 42, 'test.pt', exc), + raise exc + + template = self.from_string("") + template._render = _render + try: + template() + except TestException: + self.assertEqual(calls, [(('foo', ), {'bar': 'baz'})]) + exc = sys.exc_info()[1] + formatted = str(exc) + self.assertTrue('TestException' in formatted) + self.assertTrue('"expression"' in formatted) + self.assertTrue('(1:42)' in formatted) + else: + self.fail("unexpected error") + + def test_double_underscore_variable(self): + from chameleon.exc import TranslationError + self.assertRaises( + TranslationError, self.from_string, + "
${__dummy}
", + ) + + def test_compiler_internals_are_disallowed(self): + from chameleon.compiler import COMPILER_INTERNALS_OR_DISALLOWED + from chameleon.exc import TranslationError + + for name in COMPILER_INTERNALS_OR_DISALLOWED: + body = "${%s}" % (name, name) + self.assertRaises(TranslationError, self.from_string, body) + + def test_fast_translate_mapping(self): + template = self.from_string( + '
' + 'foo' + '
') + + self.assertEqual(template(), '
foo
') + + def test_translate_is_not_an_internal(self): + macro = self.from_string('bar') + template = self.from_string( + ''' + + foo + + + ''') + + result = template(macro=macro) + self.assertTrue('foo' in result) + self.assertTrue('foo' in result) + + def test_literal_false(self): + template = self.from_string( + '' + '' + '' + '', + literal_false=True, + ) + + self.assertEqual( + template(), + '' + '' + '' + '', + template.source + ) + + def test_boolean_attributes(self): + template = self.from_string( + '' + '' + '' + '' + '' + '', + boolean_attributes=set(['checked']) + ) + + self.assertEqual( + template(), + '' + '' + '' + '' + '' + '', + template.source + ) + + def test_default_debug_flag(self): + from chameleon.config import DEBUG_MODE + template = self.from_file( + os.path.join(self.root, 'inputs', 'hello_world.pt'), + ) + self.assertEqual(template.debug, DEBUG_MODE) + self.assertTrue('debug' not in template.__dict__) + + def test_debug_flag_on_string(self): + from chameleon.loader import ModuleLoader + + with open(os.path.join(self.root, 'inputs', 'hello_world.pt')) as f: + source = f.read() + + template = self.from_string(source, debug=True) + + self.assertTrue(template.debug) + self.assertTrue(isinstance(template.loader, ModuleLoader)) + + def test_debug_flag_on_file(self): + from chameleon.loader import ModuleLoader + template = self.from_file( + os.path.join(self.root, 'inputs', 'hello_world.pt'), + debug=True, + ) + self.assertTrue(template.debug) + self.assertTrue(isinstance(template.loader, ModuleLoader)) + + def test_tag_mismatch(self): + from chameleon.exc import ParseError + + try: + self.from_string(""" +
+
+
+ """) + except ParseError: + exc = sys.exc_info()[1] + self.assertTrue("" in str(exc)) + else: + self.fail("Expected error.") + + +class ZopeTemplatesTestSuite(RenderTestCase): + def setUp(self): + self.temp_path = temp_path = tempfile.mkdtemp() + + @self.addCleanup + def cleanup(path=temp_path): + shutil.rmtree(path) + + def test_pt_files(self): + from ..zpt.template import PageTemplateFile + + class Literal(object): + def __init__(self, s): + self.s = s + + def __html__(self): + return self.s + + def __str__(self): + raise RuntimeError( + "%r is a literal." % self.s) + + from chameleon.loader import TemplateLoader + loader = TemplateLoader(os.path.join(self.root, "inputs")) + + self.execute( + ".pt", PageTemplateFile, + literal=Literal("
Hello world!
"), + content="
Hello world!
", + message=Message(), + load=loader.bind(PageTemplateFile), + ) + + def test_txt_files(self): + from ..zpt.template import PageTextTemplateFile + self.execute(".txt", PageTextTemplateFile) + + def execute(self, ext, factory, **kwargs): + def translate(msgid, domain=None, mapping=None, context=None, + target_language=None, default=None): + if default is None: + default = str(msgid) + + if isinstance(msgid, Message): + default = "Message" + + if mapping: + default = re.sub(r'\${([a-z_]+)}', r'%(\1)s', default) % \ + mapping + + if target_language is None: + return default + + if domain is None: + with_domain = "" + else: + with_domain = " with domain '%s'" % domain + + stripped = default.rstrip('\n ') + return "%s ('%s' translation into '%s'%s)%s" % ( + stripped, msgid, target_language, with_domain, + default[len(stripped):] + ) + + for input_path, output_path, language in self.find_files(ext): + # Make friendly title so we can locate the generated + # source when debugging + self.shortDescription = lambda: input_path + + # Very implicitly enable implicit translation based on + # a string included in the input path: + implicit_i18n = 'implicit-i18n' in input_path + implicit_i18n_attrs = ("alt", "title") if implicit_i18n else () + + template = factory( + input_path, + keep_source=True, + strict=False, + implicit_i18n_translate=implicit_i18n, + implicit_i18n_attributes=implicit_i18n_attrs, + ) + + params = kwargs.copy() + params.update({ + 'translate': translate, + 'target_language': language, + }) + + template.cook_check() + + try: + got = template.render(**params) + except: + import traceback + e = traceback.format_exc() + self.fail("%s\n\n Example source:\n\n%s" % (e, "\n".join( + ["%#03.d%s" % (lineno + 1, line and " " + line or "") + for (lineno, line) in + enumerate(template.source.split( + '\n'))]))) + + if isinstance(got, byte_string): + got = got.decode('utf-8') + + from doctest import OutputChecker + checker = OutputChecker() + + if not os.path.exists(output_path): + output = template.body + else: + with open(output_path, 'rb') as f: + output = f.read() + + from chameleon.utils import read_xml_encoding + from chameleon.utils import detect_encoding + + if template.content_type == 'text/xml': + encoding = read_xml_encoding(output) or \ + template.default_encoding + else: + content_type, encoding = detect_encoding( + output, template.default_encoding) + + want = output.decode(encoding) + + if checker.check_output(want, got, 0) is False: + from doctest import Example + example = Example(input_path, want) + diff = checker.output_difference( + example, got, 0) + self.fail("(%s) - \n%s\n\nCode:\n%s" % ( + input_path, diff.rstrip('\n'), + template.source.encode('utf-8'))) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tests/test_tokenizer.py b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_tokenizer.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tests/test_tokenizer.py @@ -0,0 +1,47 @@ +import sys + +from unittest import TestCase + + +class TokenizerTest(TestCase): + def test_sample_files(self): + import os + import traceback + path = os.path.join(os.path.dirname(__file__), "inputs") + for filename in os.listdir(path): + if not filename.endswith('.xml'): + continue + f = open(os.path.join(path, filename), 'rb') + source = f.read() + f.close() + + from ..utils import read_encoded + try: + want = read_encoded(source) + except UnicodeDecodeError: + exc = sys.exc_info()[1] + self.fail("%s - %s" % (exc, filename)) + + from ..tokenize import iter_xml + try: + 
tokens = iter_xml(want) + got = "".join(tokens) + except: + self.fail(traceback.format_exc()) + + from doctest import OutputChecker + checker = OutputChecker() + + if checker.check_output(want, got, 0) is False: + from doctest import Example + example = Example(f.name, want) + diff = checker.output_difference( + example, got, 0) + self.fail("(%s) - \n%s" % (f.name, diff)) + + def test_token(self): + from chameleon.tokenize import Token + token = Token("abc", 1) + + self.assertTrue(isinstance(token[1:], Token)) + self.assertEqual(token[1:].pos, 2) diff --git a/lib3/Chameleon-2.9.2/src/chameleon/tokenize.py b/lib3/Chameleon-2.9.2/src/chameleon/tokenize.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/tokenize.py @@ -0,0 +1,144 @@ +# http://code.activestate.com/recipes/65125-xml-lexing-shallow-parsing/ +# by Paul Prescod +# licensed under the PSF License +# +# modified to capture all non-overlapping parts of tokens + +import re + +try: + str = unicode +except NameError: + pass + +class recollector: + def __init__(self): + self.res = {} + + def add(self, name, reg ): + re.compile(reg) # check that it is valid + self.res[name] = reg % self.res + +collector = recollector() +a = collector.add + +a("TextSE", "[^<]+") +a("UntilHyphen", "[^-]*-") +a("Until2Hyphens", "%(UntilHyphen)s(?:[^-]%(UntilHyphen)s)*-") +a("CommentCE", "%(Until2Hyphens)s>?") +a("UntilRSBs", "[^\\]]*](?:[^\\]]+])*]+") +a("CDATA_CE", "%(UntilRSBs)s(?:[^\\]>]%(UntilRSBs)s)*>" ) +a("S", "[ \\n\\t\\r]+") +a("Simple", "[^\"'>/]+") +a("NameStrt", "[A-Za-z_:]|[^\\x00-\\x7F]") +a("NameChar", "[A-Za-z0-9_:.-]|[^\\x00-\\x7F]") +a("Name", "(?:%(NameStrt)s)(?:%(NameChar)s)*") +a("QuoteSE", "\"[^\"]*\"|'[^']*'") +a("DT_IdentSE" , "%(S)s%(Name)s(?:%(S)s(?:%(Name)s|%(QuoteSE)s))*" ) +a("MarkupDeclCE" , "(?:[^\\]\"'><]+|%(QuoteSE)s)*>" ) +a("S1", "[\\n\\r\\t ]") +a("UntilQMs", "[^?]*\\?+") +a("PI_Tail" , "\\?>|%(S1)s%(UntilQMs)s(?:[^>?]%(UntilQMs)s)*>" ) +a("DT_ItemSE", + "<(?:!(?:--%(Until2Hyphens)s>|[^-]%(MarkupDeclCE)s)|" + "\\?%(Name)s(?:%(PI_Tail)s))|%%%(Name)s;|%(S)s" +) +a("DocTypeCE" , +"%(DT_IdentSE)s(?:%(S)s)?(?:\\[(?:%(DT_ItemSE)s)*](?:%(S)s)?)?>?" 
) +a("DeclCE", + "--(?:%(CommentCE)s)?|\\[CDATA\\[(?:%(CDATA_CE)s)?|" + "DOCTYPE(?:%(DocTypeCE)s)?") +a("PI_CE", "%(Name)s(?:%(PI_Tail)s)?") +a("EndTagCE", "%(Name)s(?:%(S)s)?>?") +a("AttValSE", "\"[^\"]*\"|'[^']*'") +a("ElemTagCE", + "(%(Name)s)(?:(%(S)s)(%(Name)s)(((?:%(S)s)?=(?:%(S)s)?)" + "(?:%(AttValSE)s|%(Simple)s)|(?!(?:%(S)s)?=)))*(?:%(S)s)?(/?>)?") +a("MarkupSPE", + "<(?:!(?:%(DeclCE)s)?|" + "\\?(?:%(PI_CE)s)?|/(?:%(EndTagCE)s)?|(?:%(ElemTagCE)s)?)") +a("XML_SPE", "%(TextSE)s|%(MarkupSPE)s") +a("XML_MARKUP_ONLY_SPE", "%(MarkupSPE)s") +a("ElemTagSPE", "<|%(Name)s") + +re_xml_spe = re.compile(collector.res['XML_SPE']) +re_markup_only_spe = re.compile(collector.res['XML_MARKUP_ONLY_SPE']) + + +def iter_xml(body, filename=None): + for match in re_xml_spe.finditer(body): + string = match.group() + pos = match.start() + yield Token(string, pos, body, filename) + + +def iter_text(body, filename=None): + yield Token(body, 0, body, filename) + + +class Token(str): + __slots__ = "pos", "source", "filename" + + def __new__(cls, string, pos=0, source=None, filename=None): + inst = str.__new__(cls, string) + inst.pos = pos + inst.source = source + inst.filename = filename or "" + return inst + + def __getslice__(self, i, j): + slice = str.__getslice__(self, i, j) + return Token(slice, self.pos + i, self.source, self.filename) + + def __getitem__(self, index): + s = str.__getitem__(self, index) + if isinstance(index, slice): + return Token( + s, self.pos + (index.start or 0), self.source, self.filename) + return s + + def __add__(self, other): + if other is None: + return self + + return Token( + str.__add__(self, other), self.pos, self.source, self.filename) + + def __eq__(self, other): + return str.__eq__(self, other) + + def __hash__(self): + return str.__hash__(self) + + def replace(self, *args): + s = str.replace(self, *args) + return Token(s, self.pos, self.source, self.filename) + + def split(self, *args): + l = str.split(self, *args) + pos = self.pos + for i, s in enumerate(l): + l[i] = Token(s, pos, self.source, self.filename) + pos += len(s) + return l + + def strip(self, *args): + return self.lstrip(*args).rstrip(*args) + + def lstrip(self, *args): + s = str.lstrip(self, *args) + return Token( + s, self.pos + len(self) - len(s), self.source, self.filename) + + def rstrip(self, *args): + s = str.rstrip(self, *args) + return Token(s, self.pos, self.source, self.filename) + + @property + def location(self): + if self.source is None: + return 0, self.pos + + body = self.source[:self.pos] + line = body.count('\n') + return line + 1, self.pos - body.rfind('\n', 0) - 1 diff --git a/lib3/Chameleon-2.9.2/src/chameleon/utils.py b/lib3/Chameleon-2.9.2/src/chameleon/utils.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/utils.py @@ -0,0 +1,429 @@ +import os +import re +import sys +import codecs +import logging + +from copy import copy + +version = sys.version_info[:3] + +try: + import ast as _ast +except ImportError: + from chameleon import ast24 as _ast + + +class ASTProxy(object): + aliases = { + # Python 3.3 + 'TryExcept': 'Try', + 'TryFinally': 'Try', + } + + def __getattr__(self, name): + return _ast.__dict__.get(name) or getattr(_ast, self.aliases[name]) + + +ast = ASTProxy() + +log = logging.getLogger('chameleon.utils') + +# Python 2 +if version < (3, 0, 0): + import htmlentitydefs + import __builtin__ as builtins + + from .py25 import raise_with_traceback + + chr = unichr + native_string = str + decode_string = unicode + encode_string = str + unicode_string 
= unicode + string_type = basestring + byte_string = str + + def safe_native(s, encoding='utf-8'): + if not isinstance(s, unicode): + s = decode_string(s, encoding, 'replace') + + return s.encode(encoding) + +# Python 3 +else: + from html import entities as htmlentitydefs + import builtins + + byte_string = bytes + string_type = str + native_string = str + decode_string = bytes.decode + encode_string = lambda s: bytes(s, 'utf-8') + unicode_string = str + + def safe_native(s, encoding='utf-8'): + if not isinstance(s, str): + s = decode_string(s, encoding, 'replace') + + return s + + def raise_with_traceback(exc, tb): + exc.__traceback__ = tb + raise exc + +def text_(s, encoding='latin-1', errors='strict'): + """ If ``s`` is an instance of ``byte_string``, return + ``s.decode(encoding, errors)``, otherwise return ``s``""" + if isinstance(s, byte_string): + return s.decode(encoding, errors) + return s + +entity_re = re.compile(r'&(#?)(x?)(\d{1,5}|\w{1,8});') + +module_cache = {} + +xml_prefixes = ( + (codecs.BOM_UTF8, 'utf-8-sig'), + (codecs.BOM_UTF16_BE, 'utf-16-be'), + (codecs.BOM_UTF16_LE, 'utf-16-le'), + (codecs.BOM_UTF16, 'utf-16'), + (codecs.BOM_UTF32_BE, 'utf-32-be'), + (codecs.BOM_UTF32_LE, 'utf-32-le'), + (codecs.BOM_UTF32, 'utf-32'), + ) + + +def _has_encoding(encoding): + try: + "".encode(encoding) + except LookupError: + return False + else: + return True + + +# Precomputed prefix table +_xml_prefixes = tuple( + (bom, str('\s*', + re.IGNORECASE + ) + +RE_ENCODING = re.compile( + r'encoding\s*=\s*(?:"|\')(?P[\w\-]+)(?:"|\')'.encode('ascii'), + re.IGNORECASE + ) + + +def read_encoded(data): + return read_bytes(data, "utf-8")[0] + + +def read_bytes(body, default_encoding): + for bom, prefix, encoding in _xml_prefixes: + if body.startswith(bom): + document = body.decode(encoding) + return document, encoding, \ + "text/xml" if document.startswith(">> mangle('hello_world.pt') + 'hello_world' + + >>> mangle('foo.bar.baz.pt') + 'foo_bar_baz' + + >>> mangle('foo-bar-baz.pt') + 'foo_bar_baz' + + """ + + base, ext = os.path.splitext(filename) + return base.replace('.', '_').replace('-', '_') + + +def char2entity(c): + cp = ord(c) + name = htmlentitydefs.codepoint2name.get(cp) + return '&%s;' % name if name is not None else '&#%d;' % cp + + +def substitute_entity(match, n2cp=htmlentitydefs.name2codepoint): + ent = match.group(3) + + if match.group(1) == "#": + if match.group(2) == '': + return chr(int(ent)) + elif match.group(2) == 'x': + return chr(int('0x' + ent, 16)) + else: + cp = n2cp.get(ent) + + if cp: + return chr(cp) + else: + return match.group() + + +def create_formatted_exception(exc, cls, formatter): + try: + try: + new = type(cls.__name__, (cls, Exception), { + '__str__': formatter, + '__new__': BaseException.__new__, + '__module__': cls.__module__, + }) + except TypeError: + new = cls + + try: + inst = BaseException.__new__(new) + except TypeError: + inst = cls.__new__(new) + + BaseException.__init__(inst, *exc.args) + inst.__dict__ = exc.__dict__ + + return inst + except ValueError: + name = type(exc).__name__ + log.warn("Unable to copy exception of type '%s'." % name) + raise TypeError(exc) + + +def unescape(string): + for name in ('lt', 'gt', 'quot'): + cp = htmlentitydefs.name2codepoint[name] + string = string.replace('&%s;' % name, chr(cp)) + + return string + + +_concat = unicode_string("").join + + +def join(stream): + """Concatenate stream. + + >>> print(join(('Hello', ' ', 'world'))) + Hello world + + >>> join(('Hello', 0)) + Traceback (most recent call last): + ... 
+ TypeError: ... expected ... + + """ + + try: + return _concat(stream) + except: + # Loop through stream and coerce each element into unicode; + # this should raise an exception + for element in stream: + unicode_string(element) + + # In case it didn't, re-raise the original exception + raise + + +def decode_htmlentities(string): + """ + >>> native_string(decode_htmlentities('&amp;')) + '&' + + """ + + decoded = entity_re.subn(substitute_entity, string)[0] + + # preserve input token data + return string.replace(string, decoded) + + +# Taken from zope.dottedname +def _resolve_dotted(name, module=None): + name = name.split('.') + if not name[0]: + if module is None: + raise ValueError("relative name without base module") + module = module.split('.') + name.pop(0) + while not name[0]: + module.pop() + name.pop(0) + name = module + name + + used = name.pop(0) + found = __import__(used) + for n in name: + used += '.' + n + try: + found = getattr(found, n) + except AttributeError: + __import__(used) + found = getattr(found, n) + + return found + + +def resolve_dotted(dotted): + if not dotted in module_cache: + resolved = _resolve_dotted(dotted) + module_cache[dotted] = resolved + return module_cache[dotted] + + +def limit_string(s, max_length=53): + if len(s) > max_length: + return s[:max_length - 3] + '...' + + return s + + +def format_kwargs(kwargs): + items = [] + for name, value in kwargs.items(): + if isinstance(value, string_type): + short = limit_string(value) + items.append((name, short.replace('\n', '\\n'))) + elif isinstance(value, (int, float)): + items.append((name, value)) + elif isinstance(value, dict): + items.append((name, '{...} (%d)' % len(value))) + else: + items.append((name, + "<%s %s at %s>" % ( + type(value).__name__, + getattr(value, '__name__', "-"), + hex(abs(id(value)))))) + + return ["%s: %s" % item for item in items] + + +class callablestr(str): + __slots__ = () + + def __call__(self): + return self + + +class callableint(int): + __slots__ = () + + def __call__(self): + return self + + +class descriptorstr(object): + __slots__ = "function", "__name__" + + def __init__(self, function): + self.function = function + self.__name__ = function.__name__ + + def __get__(self, context, cls): + return callablestr(self.function(context)) + + +class descriptorint(object): + __slots__ = "function", "__name__" + + def __init__(self, function): + self.function = function + self.__name__ = function.__name__ + + def __get__(self, context, cls): + return callableint(self.function(context)) + + +class DebuggingOutputStream(list): + def append(self, value): + if not isinstance(value, string_type): + raise TypeError(value) + + unicode_string(value) + list.append(self, value) + + +class Scope(dict): + set_local = setLocal = dict.__setitem__ + + __slots__ = "set_global", + + def __new__(cls, *args): + inst = dict.__new__(cls, *args) + inst.set_global = inst.__setitem__ + return inst + + def __getitem__(self, key): + try: + return dict.__getitem__(self, key) + except KeyError: + raise NameError(key) + + @property + def vars(self): + return self + + def copy(self): + inst = Scope(self) + inst.set_global = self.set_global + return inst + + +class ListDictProxy(object): + def __init__(self, l): + self._l = l + + def get(self, key): + return self._l[-1].get(key) + + +class Markup(unicode_string): + def __html__(self): + return unicode_string(self) + + def __repr__(self): + return "s'%s'" % self.s diff --git a/lib3/Chameleon-2.9.2/src/chameleon/zpt/__init__.py 
b/lib3/Chameleon-2.9.2/src/chameleon/zpt/__init__.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/zpt/__init__.py @@ -0,0 +1,1 @@ +# diff --git a/lib3/Chameleon-2.9.2/src/chameleon/zpt/loader.py b/lib3/Chameleon-2.9.2/src/chameleon/zpt/loader.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/zpt/loader.py @@ -0,0 +1,30 @@ +from chameleon.loader import TemplateLoader as BaseLoader +from chameleon.zpt import template + + +class TemplateLoader(BaseLoader): + formats = { + "xml": template.PageTemplateFile, + "text": template.PageTextTemplateFile, + } + + default_format = "xml" + + def __init__(self, *args, **kwargs): + formats = kwargs.pop('formats', None) + if formats is not None: + self.formats = formats + + super(TemplateLoader, self).__init__(*args, **kwargs) + + def load(self, filename, format=None): + """Load and return a template file. + + The format parameter determines will parse the file. Valid + options are `xml` and `text`. + """ + + cls = self.formats[format or self.default_format] + return super(TemplateLoader, self).load(filename, cls) + + __getitem__ = load diff --git a/lib3/Chameleon-2.9.2/src/chameleon/zpt/program.py b/lib3/Chameleon-2.9.2/src/chameleon/zpt/program.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/chameleon/zpt/program.py @@ -0,0 +1,751 @@ +import re + +try: + import ast +except ImportError: + from chameleon import ast24 as ast + +try: + str = unicode +except NameError: + long = int + +from functools import partial +from copy import copy + +from ..program import ElementProgram + +from ..namespaces import XML_NS +from ..namespaces import XMLNS_NS +from ..namespaces import I18N_NS as I18N +from ..namespaces import TAL_NS as TAL +from ..namespaces import METAL_NS as METAL +from ..namespaces import META_NS as META + +from ..astutil import Static +from ..astutil import parse +from ..astutil import marker + +from .. import tal +from .. import metal +from .. import i18n +from .. import nodes + +from ..exc import LanguageError +from ..exc import ParseError +from ..exc import CompilationError + +from ..utils import decode_htmlentities + +try: + str = unicode +except NameError: + long = int + + +missing = object() + +re_trim = re.compile(r'($\s+|\s+^)', re.MULTILINE) + +def skip(node): + return node + + +def wrap(node, *wrappers): + for wrapper in reversed(wrappers): + node = wrapper(node) + return node + + +def validate_attributes(attributes, namespace, whitelist): + for ns, name in attributes: + if ns == namespace and name not in whitelist: + raise CompilationError( + "Bad attribute for namespace '%s'" % ns, name + ) + + +class MacroProgram(ElementProgram): + """Visitor class that generates a program for the ZPT language.""" + + DEFAULT_NAMESPACES = { + 'xmlns': XMLNS_NS, + 'xml': XML_NS, + 'tal': TAL, + 'metal': METAL, + 'i18n': I18N, + 'meta': META, + } + + DROP_NS = TAL, METAL, I18N, META + + VARIABLE_BLACKLIST = "default", "repeat", "nothing", \ + "convert", "decode", "translate" + + _interpolation_enabled = True + _whitespace = "\n" + _last = "" + + # Macro name (always trivial for a macro program) + name = None + + # This default marker value has the semantics that if an + # expression evaluates to that value, the expression default value + # is returned. For an attribute, if there is no default, this + # means that the attribute is dropped. 
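    # A hedged illustration (the template bodies below are assumptions, not
    # part of this module): these marker semantics are what make the TAL
    # `default` name work.  Roughly:
    #
    #     from chameleon import PageTemplate
    #     kept    = PageTemplate('<a class="x" tal:attributes="class default">y</a>')
    #     dropped = PageTemplate('<a class="x" tal:attributes="class None">y</a>')
    #     kept()     # class="x" survives: the expression evaluated to `default`
    #     dropped()  # the class attribute is dropped: there is no fallback value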
+ default_marker = None + + # Escape mode (true value means XML-escape) + escape = True + + # Attributes which should have boolean behavior (on true, the + # value takes the attribute name, on false, the attribute is + # dropped) + boolean_attributes = set() + + # If provided, this should be a set of attributes for implicit + # translation. Any attribute whose name is included in the set + # will be translated even without explicit markup. Note that all + # values should be lowercase strings. + implicit_i18n_attributes = set() + + # If set, text will be translated even without explicit markup. + implicit_i18n_translate = False + + # If set, additional attribute whitespace will be stripped. + trim_attribute_space = False + + def __init__(self, *args, **kwargs): + # Internal array for switch statements + self._switches = [] + + # Internal array for current use macro level + self._use_macro = [] + + # Internal array for current interpolation status + self._interpolation = [True] + + # Internal dictionary of macro definitions + self._macros = {} + + # Apply default values from **kwargs to self + self._pop_defaults( + kwargs, + 'boolean_attributes', + 'default_marker', + 'escape', + 'implicit_i18n_translate', + 'implicit_i18n_attributes', + 'trim_attribute_space', + ) + + super(MacroProgram, self).__init__(*args, **kwargs) + + @property + def macros(self): + macros = list(self._macros.items()) + macros.append((None, nodes.Sequence(self.body))) + + return tuple( + nodes.Macro(name, [nodes.Context(node)]) + for name, node in macros + ) + + def visit_default(self, node): + return nodes.Text(node) + + def visit_element(self, start, end, children): + ns = start['ns_attrs'] + + for (prefix, attr), encoded in tuple(ns.items()): + if prefix == TAL: + ns[prefix, attr] = decode_htmlentities(encoded) + + # Validate namespace attributes + validate_attributes(ns, TAL, tal.WHITELIST) + validate_attributes(ns, METAL, metal.WHITELIST) + validate_attributes(ns, I18N, i18n.WHITELIST) + + # Check attributes for language errors + self._check_attributes(start['namespace'], ns) + + # Remember whitespace for item repetition + if self._last is not None: + self._whitespace = "\n" + " " * len(self._last.rsplit('\n', 1)[-1]) + + # Set element-local whitespace + whitespace = self._whitespace + + # Set up switch + try: + clause = ns[TAL, 'switch'] + except KeyError: + switch = None + else: + switch = nodes.Value(clause) + + self._switches.append(switch) + + body = [] + + # Include macro + use_macro = ns.get((METAL, 'use-macro')) + extend_macro = ns.get((METAL, 'extend-macro')) + if use_macro or extend_macro: + slots = [] + self._use_macro.append(slots) + + if use_macro: + inner = nodes.UseExternalMacro( + nodes.Value(use_macro), slots, False + ) + else: + inner = nodes.UseExternalMacro( + nodes.Value(extend_macro), slots, True + ) + # -or- include tag + else: + content = nodes.Sequence(body) + + # tal:content + try: + clause = ns[TAL, 'content'] + except KeyError: + pass + else: + key, value = tal.parse_substitution(clause) + xlate = True if ns.get((I18N, 'translate')) == '' else False + content = self._make_content_node(value, content, key, xlate) + + if end is None: + # Make sure start-tag has opening suffix. + start['suffix'] = ">" + + # Explicitly set end-tag. 
+ end = { + 'prefix': '' + } + + # i18n:translate + try: + clause = ns[I18N, 'translate'] + except KeyError: + pass + else: + dynamic = ns.get((TAL, 'content')) or ns.get((TAL, 'replace')) + + if not dynamic: + content = nodes.Translate(clause, content) + + # tal:attributes + try: + clause = ns[TAL, 'attributes'] + except KeyError: + TAL_ATTRIBUTES = {} + else: + TAL_ATTRIBUTES = tal.parse_attributes(clause) + + # i18n:attributes + try: + clause = ns[I18N, 'attributes'] + except KeyError: + I18N_ATTRIBUTES = {} + else: + I18N_ATTRIBUTES = i18n.parse_attributes(clause) + + # Prepare attributes from TAL language + prepared = tal.prepare_attributes( + start['attrs'], TAL_ATTRIBUTES, + I18N_ATTRIBUTES, ns, self.DROP_NS + ) + + # Create attribute nodes + STATIC_ATTRIBUTES = self._create_static_attributes(prepared) + ATTRIBUTES = self._create_attributes_nodes( + prepared, I18N_ATTRIBUTES + ) + + # Start- and end nodes + start_tag = nodes.Start( + start['name'], + self._maybe_trim(start['prefix']), + self._maybe_trim(start['suffix']), + ATTRIBUTES + ) + + end_tag = nodes.End( + end['name'], + end['space'], + self._maybe_trim(end['prefix']), + self._maybe_trim(end['suffix']), + ) if end is not None else None + + # tal:omit-tag + try: + clause = ns[TAL, 'omit-tag'] + except KeyError: + omit = False + else: + clause = clause.strip() + + if clause == "": + omit = True + else: + expression = nodes.Negate(nodes.Value(clause)) + omit = expression + + # Wrap start- and end-tags in condition + start_tag = nodes.Condition(expression, start_tag) + + if end_tag is not None: + end_tag = nodes.Condition(expression, end_tag) + + if omit is True or start['namespace'] in self.DROP_NS: + inner = content + else: + inner = nodes.Element( + start_tag, + end_tag, + content, + ) + + # Assign static attributes dictionary to "attrs" value + inner = nodes.Define( + [nodes.Alias(["attrs"], STATIC_ATTRIBUTES)], + inner, + ) + + if omit is not False: + inner = nodes.Cache([omit], inner) + + # tal:replace + try: + clause = ns[TAL, 'replace'] + except KeyError: + pass + else: + key, value = tal.parse_substitution(clause) + xlate = True if ns.get((I18N, 'translate')) == '' else False + inner = self._make_content_node(value, inner, key, xlate) + + # metal:define-slot + try: + clause = ns[METAL, 'define-slot'] + except KeyError: + DEFINE_SLOT = skip + else: + DEFINE_SLOT = partial(nodes.DefineSlot, clause) + + # tal:define + try: + clause = ns[TAL, 'define'] + except KeyError: + DEFINE = skip + else: + defines = tal.parse_defines(clause) + if defines is None: + raise ParseError("Invalid define syntax.", clause) + + DEFINE = partial( + nodes.Define, + [nodes.Assignment( + names, nodes.Value(expr), context == "local") + for (context, names, expr) in defines], + ) + + # tal:case + try: + clause = ns[TAL, 'case'] + except KeyError: + CASE = skip + else: + value = nodes.Value(clause) + for switch in reversed(self._switches): + if switch is not None: + break + else: + raise LanguageError( + "Must define switch on a parent element.", clause + ) + + CASE = lambda node: nodes.Define( + [nodes.Assignment(["default"], switch, True)], + nodes.Condition( + nodes.Equality(switch, value), + node, + ) + ) + + # tal:repeat + try: + clause = ns[TAL, 'repeat'] + except KeyError: + REPEAT = skip + else: + defines = tal.parse_defines(clause) + assert len(defines) == 1 + context, names, expr = defines[0] + + expression = nodes.Value(expr) + + REPEAT = partial( + nodes.Repeat, + names, + expression, + context == "local", + whitespace + ) + + # 
tal:condition + try: + clause = ns[TAL, 'condition'] + except KeyError: + CONDITION = skip + else: + expression = nodes.Value(clause) + CONDITION = partial(nodes.Condition, expression) + + # tal:switch + if switch is None: + SWITCH = skip + else: + SWITCH = partial(nodes.Cache, [switch]) + + # i18n:domain + try: + clause = ns[I18N, 'domain'] + except KeyError: + DOMAIN = skip + else: + DOMAIN = partial(nodes.Domain, clause) + + # i18n:name + try: + clause = ns[I18N, 'name'] + except KeyError: + NAME = skip + else: + if not clause.strip(): + NAME = skip + else: + NAME = partial(nodes.Name, clause) + + # The "slot" node next is the first node level that can serve + # as a macro slot + slot = wrap( + inner, + DEFINE_SLOT, + DEFINE, + CASE, + CONDITION, + REPEAT, + SWITCH, + DOMAIN, + ) + + # metal:fill-slot + try: + clause = ns[METAL, 'fill-slot'] + except KeyError: + pass + else: + index = -(1 + int(bool(use_macro or extend_macro))) + + try: + slots = self._use_macro[index] + except IndexError: + raise LanguageError( + "Cannot use metal:fill-slot without metal:use-macro.", + clause + ) + + slots = self._use_macro[index] + slots.append(nodes.FillSlot(clause, slot)) + + # metal:define-macro + try: + clause = ns[METAL, 'define-macro'] + except KeyError: + pass + else: + self._macros[clause] = slot + slot = nodes.UseInternalMacro(clause) + + slot = wrap( + slot, + NAME + ) + + # tal:on-error + try: + clause = ns[TAL, 'on-error'] + except KeyError: + ON_ERROR = skip + else: + key, value = tal.parse_substitution(clause) + translate = True if ns.get((I18N, 'translate')) == '' else False + + fallback = self._make_content_node(value, None, key, translate) + + if omit is False and start['namespace'] not in self.DROP_NS: + start_tag = copy(start_tag) + + start_tag.attributes = filter( + lambda attribute: isinstance(attribute, nodes.Attribute) and \ + isinstance(attribute.expression, ast.Str), + start_tag.attributes + ) + + if end_tag is None: + # Make sure start-tag has opening suffix. We don't + # allow self-closing element here. + start_tag.suffix = ">" + + # Explicitly set end-tag. 
+ end_tag = nodes.End(start_tag.name, '', '',) + + fallback = nodes.Element( + start_tag, + end_tag, + fallback, + ) + + ON_ERROR = partial(nodes.OnError, fallback, 'error') + + clause = ns.get((META, 'interpolation')) + if clause in ('false', 'off'): + INTERPOLATION = False + elif clause in ('true', 'on'): + INTERPOLATION = True + elif clause is None: + INTERPOLATION = self._interpolation[-1] + else: + raise LanguageError("Bad interpolation setting.", clause) + + self._interpolation.append(INTERPOLATION) + + # Visit content body + for child in children: + body.append(self.visit(*child)) + + self._switches.pop() + self._interpolation.pop() + + if use_macro: + self._use_macro.pop() + + return wrap( + slot, + ON_ERROR + ) + + def visit_start_tag(self, start): + return self.visit_element(start, None, []) + + def visit_cdata(self, node): + if not self._interpolation[-1] or not '${' in node: + return nodes.Text(node) + + expr = nodes.Substitution(node, ()) + return nodes.Interpolation(expr, True, False) + + def visit_comment(self, node): + if node.startswith(' [key, prev, next] + dict.clear(self) + + def __setitem__(self, key, value): + if key not in self: + end = self.__end + curr = end[1] + curr[2] = end[1] = self.__map[key] = [key, curr, end] + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + key, prev, next = self.__map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.__end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.__end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def popitem(self, last=True): + if not self: + raise KeyError('dictionary is empty') + if last: + key = reversed(self).next() + else: + key = iter(self).next() + value = self.pop(key) + return key, value + + def __reduce__(self): + items = [[k, self[k]] for k in self] + tmp = self.__map, self.__end + del self.__map, self.__end + inst_dict = vars(self).copy() + self.__map, self.__end = tmp + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def keys(self): + return list(self) + + setdefault = DictMixin.setdefault + update = DictMixin.update + pop = DictMixin.pop + values = DictMixin.values + items = DictMixin.items + iterkeys = DictMixin.iterkeys + itervalues = DictMixin.itervalues + iteritems = DictMixin.iteritems + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + + def copy(self): + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + if isinstance(other, OrderedDict): + if len(self) != len(other): + return False + for p, q in zip(self.items(), other.items()): + if p != q: + return False + return True + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other diff --git a/lib3/Chameleon-2.9.2/src/pkg_resources.py b/lib3/Chameleon-2.9.2/src/pkg_resources.py new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/src/pkg_resources.py @@ -0,0 +1,2838 @@ +"""Package resource API +-------------------- + +A resource is a logical file contained within a package, or a logical +subdirectory thereof. The package resource API expects resource names +to have their path parts separated with ``/``, *not* whatever the local +path separator is. 
Do not use os.path operations to manipulate resource +names being passed into the API. + +The package resource API is designed to work with normal filesystem packages, +.egg files, and unpacked .egg files. It can also work in a limited way with +.zip files and with custom PEP 302 loaders that support the ``get_data()`` +method. +""" + +import sys, os, zipimport, time, re, imp, types +from urllib.parse import urlparse, urlunparse + +try: + frozenset +except NameError: + from sets import ImmutableSet as frozenset + +# capture these to bypass sandboxing +from os import utime +try: + from os import mkdir, rename, unlink + WRITE_SUPPORT = True +except ImportError: + # no write support, probably under GAE + WRITE_SUPPORT = False + +from os import open as os_open +from os.path import isdir, split + +# This marker is used to simplify the process that checks is the +# setuptools package was installed by the Setuptools project +# or by the Distribute project, in case Setuptools creates +# a distribution with the same version. +# +# The bootstrapping script for instance, will check if this +# attribute is present to decide wether to reinstall the package +_distribute = True + +def _bypass_ensure_directory(name, mode=0o777): + # Sandbox-bypassing version of ensure_directory() + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(name) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + mkdir(dirname, mode) + + +_state_vars = {} + +def _declare_state(vartype, **kw): + g = globals() + for name, val in kw.items(): + g[name] = val + _state_vars[name] = vartype + +def __getstate__(): + state = {} + g = globals() + for k, v in _state_vars.items(): + state[k] = g['_sget_'+v](g[k]) + return state + +def __setstate__(state): + g = globals() + for k, v in state.items(): + g['_sset_'+_state_vars[k]](k, g[k], v) + return state + +def _sget_dict(val): + return val.copy() + +def _sset_dict(key, ob, state): + ob.clear() + ob.update(state) + +def _sget_object(val): + return val.__getstate__() + +def _sset_object(key, ob, state): + ob.__setstate__(state) + +_sget_none = _sset_none = lambda *args: None + + + +def get_supported_platform(): + """Return this platform's maximum compatible version. + + distutils.util.get_platform() normally reports the minimum version + of Mac OS X that would be required to *use* extensions produced by + distutils. But what we want when checking compatibility is to know the + version of Mac OS X that we are *running*. To allow usage of packages that + explicitly require a newer version of Mac OS X, we must also know the + current version of the OS. + + If this condition occurs for any other platform with a version in its + platform strings, this function should be extended accordingly. 
+ """ + plat = get_build_platform(); m = macosVersionString.match(plat) + if m is not None and sys.platform == "darwin": + try: + plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) + except ValueError: + pass # not Mac OS X + return plat + + + + + + + + + + + + + + + + + + + + + +__all__ = [ + # Basic resource access and distribution/entry point discovery + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + + # Environmental control + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'get_default_cache', + + # Primary implementation classes + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + + # Exceptions + 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', + 'ExtractionError', + + # Parsing functions and string utilities + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', + + # filesystem utilities + 'ensure_directory', 'normalize_path', + + # Distribution "precedence" constants + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + + # "Provider" interfaces, implementations, and registration/lookup APIs + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 'get_importer', + + # Deprecated/backward compatibility only + 'run_main', 'AvailableDistributions', +] +class ResolutionError(Exception): + """Abstract base for dependency resolution errors""" + def __repr__(self): + return self.__class__.__name__+repr(self.args) + +class VersionConflict(ResolutionError): + """An already-installed version conflicts with the requested version""" + +class DistributionNotFound(ResolutionError): + """A requested distribution was not found""" + +class UnknownExtra(ResolutionError): + """Distribution doesn't have an "extra feature" of the given name""" +_provider_factories = {} + +PY_MAJOR = sys.version[:3] +EGG_DIST = 3 +BINARY_DIST = 2 +SOURCE_DIST = 1 +CHECKOUT_DIST = 0 +DEVELOP_DIST = -1 + +def register_loader_type(loader_type, provider_factory): + """Register `provider_factory` to make providers for `loader_type` + + `loader_type` is the type or class of a PEP 302 ``module.__loader__``, + and `provider_factory` is a function that, passed a *module* object, + returns an ``IResourceProvider`` for that module. 
+ """ + _provider_factories[loader_type] = provider_factory + +def get_provider(moduleOrReq): + """Return an IResourceProvider for the named module or requirement""" + if isinstance(moduleOrReq,Requirement): + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] + try: + module = sys.modules[moduleOrReq] + except KeyError: + __import__(moduleOrReq) + module = sys.modules[moduleOrReq] + loader = getattr(module, '__loader__', None) + return _find_adapter(_provider_factories, loader)(module) + +def _macosx_vers(_cache=[]): + if not _cache: + import platform + version = platform.mac_ver()[0] + # fallback for MacPorts + if version == '': + import plistlib + plist = '/System/Library/CoreServices/SystemVersion.plist' + if os.path.exists(plist): + if hasattr(plistlib, 'readPlist'): + plist_content = plistlib.readPlist(plist) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] + + _cache.append(version.split('.')) + return _cache[0] + +def _macosx_arch(machine): + return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine) + +def get_build_platform(): + """Return this platform's string for platform-specific distributions + + XXX Currently this is the same as ``distutils.util.get_platform()``, but it + needs some hacks for Linux and Mac OS X. + """ + try: + from distutils.util import get_platform + except ImportError: + from sysconfig import get_platform + + plat = get_platform() + if sys.platform == "darwin" and not plat.startswith('macosx-'): + try: + version = _macosx_vers() + machine = os.uname()[4].replace(" ", "_") + return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), + _macosx_arch(machine)) + except ValueError: + # if someone is running a non-Mac darwin system, this will fall + # through to the default implementation + pass + return plat + +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") +get_platform = get_build_platform # XXX backward compat + +def compatible_platforms(provided,required): + """Can code for the `provided` platform run on the `required` platform? + + Returns true if either platform is ``None``, or the platforms are equal. + + XXX Needs compatibility checks for Linux and other unixy OSes. + """ + if provided is None or required is None or provided==required: + return True # easy case + + # Mac OS X special cases + reqMac = macosVersionString.match(required) + if reqMac: + provMac = macosVersionString.match(provided) + + # is this a Mac package? + if not provMac: + # this is backwards compatibility for packages built before + # setuptools 0.6. All packages built after this point will + # use the new macosx designation. + provDarwin = darwinVersionString.match(provided) + if provDarwin: + dversion = int(provDarwin.group(1)) + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": + + #import warnings + #warnings.warn("Mac eggs should be rebuilt to " + # "use the macosx designation instead of darwin.", + # category=DeprecationWarning) + return True + return False # egg isn't macosx or legacy darwin + + # are they the same major version and machine type? + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): + return False + + + + # is the required OS major update >= the provided one? 
+ if int(provMac.group(2)) > int(reqMac.group(2)): + return False + + return True + + # XXX Linux and other platforms' special cases should go here + return False + + +def run_script(dist_spec, script_name): + """Locate distribution `dist_spec` and run its `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + require(dist_spec)[0].run_script(script_name, ns) + +run_main = run_script # backward compatibility + +def get_distribution(dist): + """Return a current distribution object for a Requirement or string""" + if isinstance(dist,str): dist = Requirement.parse(dist) + if isinstance(dist,Requirement): dist = get_provider(dist) + if not isinstance(dist,Distribution): + raise TypeError("Expected string, Requirement, or Distribution", dist) + return dist + +def load_entry_point(dist, group, name): + """Return `name` entry point of `group` for `dist` or raise ImportError""" + return get_distribution(dist).load_entry_point(group, name) + +def get_entry_map(dist, group=None): + """Return the entry point map for `group`, or the full entry map""" + return get_distribution(dist).get_entry_map(group) + +def get_entry_info(dist, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return get_distribution(dist).get_entry_info(group, name) + + +class IMetadataProvider: + + def has_metadata(name): + """Does the package's distribution contain the named metadata?""" + + def get_metadata(name): + """The named metadata resource as a string""" + + def get_metadata_lines(name): + """Yield named metadata resource as list of non-blank non-comment lines + + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" + + def metadata_isdir(name): + """Is the named metadata a directory? (like ``os.path.isdir()``)""" + + def metadata_listdir(name): + """List of metadata names in the directory (like ``os.listdir()``)""" + + def run_script(script_name, namespace): + """Execute the named script in the supplied namespace dictionary""" + + + + + + + + + + +class IResourceProvider(IMetadataProvider): + """An object that provides access to package resources""" + + def get_resource_filename(manager, resource_name): + """Return a true filesystem path for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_stream(manager, resource_name): + """Return a readable file-like object for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_string(manager, resource_name): + """Return a string containing the contents of `resource_name` + + `manager` must be an ``IResourceManager``""" + + def has_resource(resource_name): + """Does the package contain the named resource?""" + + def resource_isdir(resource_name): + """Is the named resource a directory? 
(like ``os.path.isdir()``)""" + + def resource_listdir(resource_name): + """List of resource names in the directory (like ``os.listdir()``)""" + + + + + + + + + + + + + + + +class WorkingSet(object): + """A collection of active distributions on sys.path (or a similar list)""" + + def __init__(self, entries=None): + """Create working set from list of path entries (default=sys.path)""" + self.entries = [] + self.entry_keys = {} + self.by_key = {} + self.callbacks = [] + + if entries is None: + entries = sys.path + + for entry in entries: + self.add_entry(entry) + + + def add_entry(self, entry): + """Add a path item to ``.entries``, finding any distributions on it + + ``find_distributions(entry,True)`` is used to find distributions + corresponding to the path entry, and they are added. `entry` is + always appended to ``.entries``, even if it is already present. + (This is because ``sys.path`` can contain the same value more than + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always + equal ``sys.path``.) + """ + self.entry_keys.setdefault(entry, []) + self.entries.append(entry) + for dist in find_distributions(entry, True): + self.add(dist, entry, False) + + + def __contains__(self,dist): + """True if `dist` is the active distribution for its project""" + return self.by_key.get(dist.key) == dist + + + + + + def find(self, req): + """Find a distribution matching requirement `req` + + If there is an active distribution for the requested project, this + returns it as long as it meets the version requirement specified by + `req`. But, if there is an active distribution for the project and it + does *not* meet the `req` requirement, ``VersionConflict`` is raised. + If there is no active distribution for the requested project, ``None`` + is returned. + """ + dist = self.by_key.get(req.key) + if dist is not None and dist not in req: + raise VersionConflict(dist,req) # XXX add more info + else: + return dist + + def iter_entry_points(self, group, name=None): + """Yield entry point objects from `group` matching `name` + + If `name` is None, yields all entry points in `group` from all + distributions in the working set, otherwise only ones matching + both `group` and `name` are yielded (in distribution order). + """ + for dist in self: + entries = dist.get_entry_map(group) + if name is None: + for ep in list(entries.values()): + yield ep + elif name in entries: + yield entries[name] + + def run_script(self, requires, script_name): + """Locate distribution for `requires` and run `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + self.require(requires)[0].run_script(script_name, ns) + + + + def __iter__(self): + """Yield distributions for non-duplicate projects in the working set + + The yield order is the order in which the items' path entries were + added to the working set. + """ + seen = {} + for item in self.entries: + if item not in self.entry_keys: + # workaround a cache issue + continue + + for key in self.entry_keys[item]: + if key not in seen: + seen[key]=1 + yield self.by_key[key] + + def add(self, dist, entry=None, insert=True): + """Add `dist` to working set, associated with `entry` + + If `entry` is unspecified, it defaults to the ``.location`` of `dist`. + On exit from this routine, `entry` is added to the end of the working + set's ``.entries`` (if it wasn't already present). + + `dist` is only added to the working set if it's for a project that + doesn't already have a distribution in the set. 
If it's added, any + callbacks registered with the ``subscribe()`` method will be called. + """ + if insert: + dist.insert_on(self.entries, entry) + + if entry is None: + entry = dist.location + keys = self.entry_keys.setdefault(entry,[]) + keys2 = self.entry_keys.setdefault(dist.location,[]) + if dist.key in self.by_key: + return # ignore hidden distros + + self.by_key[dist.key] = dist + if dist.key not in keys: + keys.append(dist.key) + if dist.key not in keys2: + keys2.append(dist.key) + self._added_new(dist) + + def resolve(self, requirements, env=None, installer=None, replacement=True): + """List all distributions needed to (recursively) meet `requirements` + + `requirements` must be a sequence of ``Requirement`` objects. `env`, + if supplied, should be an ``Environment`` instance. If + not supplied, it defaults to all distributions available within any + entry or distribution in the working set. `installer`, if supplied, + will be invoked with each requirement that cannot be met by an + already-installed distribution; it should return a ``Distribution`` or + ``None``. + """ + + requirements = list(requirements)[::-1] # set up the stack + processed = {} # set of processed requirements + best = {} # key -> dist + to_activate = [] + + while requirements: + req = requirements.pop(0) # process dependencies breadth-first + if _override_setuptools(req) and replacement: + req = Requirement.parse('distribute') + + if req in processed: + # Ignore cyclic or redundant dependencies + continue + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None: + if env is None: + env = Environment(self.entries) + dist = best[req.key] = env.best_match(req, self, installer) + if dist is None: + #msg = ("The '%s' distribution was not found on this " + # "system, and is required by this application.") + #raise DistributionNotFound(msg % req) + + # unfortunately, zc.buildout uses a str(err) + # to get the name of the distribution here.. + raise DistributionNotFound(req) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + raise VersionConflict(dist,req) # XXX put more info here + requirements.extend(dist.requires(req.extras)[::-1]) + processed[req] = True + + return to_activate # return list of distros to activate + + def find_plugins(self, + plugin_env, full_env=None, installer=None, fallback=True + ): + """Find all activatable distributions in `plugin_env` + + Example usage:: + + distributions, errors = working_set.find_plugins( + Environment(plugin_dirlist) + ) + map(working_set.add, distributions) # add plugins+libs to sys.path + print 'Could not load', errors # display errors + + The `plugin_env` should be an ``Environment`` instance that contains + only distributions that are in the project's "plugin directory" or + directories. The `full_env`, if supplied, should be an ``Environment`` + contains all currently-available distributions. If `full_env` is not + supplied, one is created automatically from the ``WorkingSet`` this + method is called on, which will typically mean that every directory on + ``sys.path`` will be scanned for distributions. + + `installer` is a standard installer callback as used by the + ``resolve()`` method. The `fallback` flag indicates whether we should + attempt to resolve older versions of a plugin if the newest version + cannot be resolved. 
+ + This method returns a 2-tuple: (`distributions`, `error_info`), where + `distributions` is a list of the distributions found in `plugin_env` + that were loadable, along with any other distributions that are needed + to resolve their dependencies. `error_info` is a dictionary mapping + unloadable plugin distributions to an exception instance describing the + error that occurred. Usually this will be a ``DistributionNotFound`` or + ``VersionConflict`` instance. + """ + + plugin_projects = list(plugin_env) + plugin_projects.sort() # scan project names in alphabetic order + + error_info = {} + distributions = {} + + if full_env is None: + env = Environment(self.entries) + env += plugin_env + else: + env = full_env + plugin_env + + shadow_set = self.__class__([]) + list(map(shadow_set.add, self)) # put all our entries in shadow_set + + for project_name in plugin_projects: + + for dist in plugin_env[project_name]: + + req = [dist.as_requirement()] + + try: + resolvees = shadow_set.resolve(req, env, installer) + + except ResolutionError as v: + error_info[dist] = v # save error info + if fallback: + continue # try the next older version of project + else: + break # give up on this project, keep going + + else: + list(map(shadow_set.add, resolvees)) + distributions.update(dict.fromkeys(resolvees)) + + # success, no need to try any more versions of this project + break + + distributions = list(distributions) + distributions.sort() + + return distributions, error_info + + + + + + def require(self, *requirements): + """Ensure that distributions matching `requirements` are activated + + `requirements` must be a string or a (possibly-nested) sequence + thereof, specifying the distributions and versions required. The + return value is a sequence of the distributions that needed to be + activated to fulfill the requirements; all relevant distributions are + included, even if they were already activated in this working set. + """ + + needed = self.resolve(parse_requirements(requirements)) + + for dist in needed: + self.add(dist) + + return needed + + + def subscribe(self, callback): + """Invoke `callback` for all distributions (including existing ones)""" + if callback in self.callbacks: + return + self.callbacks.append(callback) + for dist in self: + callback(dist) + + + def _added_new(self, dist): + for callback in self.callbacks: + callback(dist) + + def __getstate__(self): + return (self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.callbacks[:]) + + def __setstate__(self, xxx_todo_changeme): + (entries, keys, by_key, callbacks) = xxx_todo_changeme + self.entries = entries[:] + self.entry_keys = keys.copy() + self.by_key = by_key.copy() + self.callbacks = callbacks[:] + + + + +class Environment(object): + """Searchable snapshot of distributions on a search path""" + + def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): + """Snapshot distributions available on a search path + + Any distributions found on `search_path` are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. + + `platform` is an optional string specifying the name of the platform + that platform-specific distributions must be compatible with. If + unspecified, it defaults to the current platform. `python` is an + optional string naming the desired version of Python (e.g. ``'2.4'``); + it defaults to the current version. 
+ + You may explicitly set `platform` (and/or `python`) to ``None`` if you + wish to map *all* distributions, not just those compatible with the + running platform or Python version. + """ + self._distmap = {} + self._cache = {} + self.platform = platform + self.python = python + self.scan(search_path) + + def can_add(self, dist): + """Is distribution `dist` acceptable for this environment? + + The distribution must match the platform and python version + requirements specified when this environment was created, or False + is returned. + """ + return (self.python is None or dist.py_version is None + or dist.py_version==self.python) \ + and compatible_platforms(dist.platform,self.platform) + + def remove(self, dist): + """Remove `dist` from the environment""" + self._distmap[dist.key].remove(dist) + + def scan(self, search_path=None): + """Scan `search_path` for distributions usable in this environment + + Any distributions found are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. Only distributions conforming to + the platform/python version defined at initialization are added. + """ + if search_path is None: + search_path = sys.path + + for item in search_path: + for dist in find_distributions(item): + self.add(dist) + + def __getitem__(self,project_name): + """Return a newest-to-oldest list of distributions for `project_name` + """ + try: + return self._cache[project_name] + except KeyError: + project_name = project_name.lower() + if project_name not in self._distmap: + return [] + + if project_name not in self._cache: + dists = self._cache[project_name] = self._distmap[project_name] + _sort_dists(dists) + + return self._cache[project_name] + + def add(self,dist): + """Add `dist` if we ``can_add()`` it and it isn't already added""" + if self.can_add(dist) and dist.has_version(): + dists = self._distmap.setdefault(dist.key,[]) + if dist not in dists: + dists.append(dist) + if dist.key in self._cache: + _sort_dists(self._cache[dist.key]) + + + def best_match(self, req, working_set, installer=None): + """Find distribution best matching `req` and usable on `working_set` + + This calls the ``find(req)`` method of the `working_set` to see if a + suitable distribution is already active. (This may raise + ``VersionConflict`` if an unsuitable version of the project is already + active in the specified `working_set`.) If a suitable distribution + isn't active, this method returns the newest distribution in the + environment that meets the ``Requirement`` in `req`. If no suitable + distribution is found, and `installer` is supplied, then the result of + calling the environment's ``obtain(req, installer)`` method will be + returned. + """ + dist = working_set.find(req) + if dist is not None: + return dist + for dist in self[req.key]: + if dist in req: + return dist + return self.obtain(req, installer) # try and download/install + + def obtain(self, requirement, installer=None): + """Obtain a distribution matching `requirement` (e.g. via download) + + Obtain a distro that matches requirement (e.g. via download). In the + base ``Environment`` class, this routine just returns + ``installer(requirement)``, unless `installer` is None, in which case + None is returned instead. 
This method is a hook that allows subclasses + to attempt other ways of obtaining a distribution before falling back + to the `installer` argument.""" + if installer is not None: + return installer(requirement) + + def __iter__(self): + """Yield the unique project names of the available distributions""" + for key in list(self._distmap.keys()): + if self[key]: yield key + + + + + def __iadd__(self, other): + """In-place addition of a distribution or environment""" + if isinstance(other,Distribution): + self.add(other) + elif isinstance(other,Environment): + for project in other: + for dist in other[project]: + self.add(dist) + else: + raise TypeError("Can't add %r to environment" % (other,)) + return self + + def __add__(self, other): + """Add an environment or distribution to an environment""" + new = self.__class__([], platform=None, python=None) + for env in self, other: + new += env + return new + + +AvailableDistributions = Environment # XXX backward compatibility + + +class ExtractionError(RuntimeError): + """An error occurred extracting a resource + + The following attributes are available from instances of this exception: + + manager + The resource manager that raised this exception + + cache_path + The base directory for resource extraction + + original_error + The exception instance that caused extraction to fail + """ + + + + +class ResourceManager: + """Manage resource extraction and packages""" + extraction_path = None + + def __init__(self): + self.cached_files = {} + + def resource_exists(self, package_or_requirement, resource_name): + """Does the named resource exist?""" + return get_provider(package_or_requirement).has_resource(resource_name) + + def resource_isdir(self, package_or_requirement, resource_name): + """Is the named resource an existing directory?""" + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) + + def resource_filename(self, package_or_requirement, resource_name): + """Return a true filesystem path for specified resource""" + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name + ) + + def resource_stream(self, package_or_requirement, resource_name): + """Return a readable file-like object for specified resource""" + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name + ) + + def resource_string(self, package_or_requirement, resource_name): + """Return specified resource as a string""" + return get_provider(package_or_requirement).get_resource_string( + self, resource_name + ) + + def resource_listdir(self, package_or_requirement, resource_name): + """List the contents of the named resource directory""" + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) + + def extraction_error(self): + """Give an error message for problems extracting file(s)""" + + old_exc = sys.exc_info()[1] + cache_path = self.extraction_path or get_default_cache() + + err = ExtractionError("""Can't extract file(s) to egg cache + +The following error occurred while trying to extract file(s) to the Python egg +cache: + + %s + +The Python egg cache directory is currently set to: + + %s + +Perhaps your account does not have write access to this directory? You can +change the cache directory by setting the PYTHON_EGG_CACHE environment +variable to point to an accessible directory. 
+""" % (old_exc, cache_path) + ) + err.manager = self + err.cache_path = cache_path + err.original_error = old_exc + raise err + + + + + + + + + + + + + + + + def get_cache_path(self, archive_name, names=()): + """Return absolute location in cache for `archive_name` and `names` + + The parent directory of the resulting path will be created if it does + not already exist. `archive_name` should be the base filename of the + enclosing egg (which may not be the name of the enclosing zipfile!), + including its ".egg" extension. `names`, if provided, should be a + sequence of path name parts "under" the egg's extraction location. + + This method should only be called by resource providers that need to + obtain an extraction location, and only for names they intend to + extract, as it tracks the generated names for possible cleanup later. + """ + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + try: + _bypass_ensure_directory(target_path) + except: + self.extraction_error() + + self.cached_files[target_path] = 1 + return target_path + + + + + + + + + + + + + + + + + + + + def postprocess(self, tempname, filename): + """Perform any platform-specific postprocessing of `tempname` + + This is where Mac header rewrites should be done; other platforms don't + have anything special they should do. + + Resource providers should call this method ONLY after successfully + extracting a compressed resource. They must NOT call it on resources + that are already in the filesystem. + + `tempname` is the current (temporary) name of the file, and `filename` + is the name it will be renamed to by the caller after this routine + returns. + """ + + if os.name == 'posix': + # Make the resource executable + mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 + os.chmod(tempname, mode) + + + + + + + + + + + + + + + + + + + + + + + def set_extraction_path(self, path): + """Set the base path where resources will be extracted to, if needed. + + If you do not call this routine before any extractions take place, the + path defaults to the return value of ``get_default_cache()``. (Which + is based on the ``PYTHON_EGG_CACHE`` environment variable, with various + platform-specific fallbacks. See that routine's documentation for more + details.) + + Resources are extracted to subdirectories of this path based upon + information given by the ``IResourceProvider``. You may set this to a + temporary directory, but then you must call ``cleanup_resources()`` to + delete the extracted files when done. There is no guarantee that + ``cleanup_resources()`` will be able to remove all extracted files. + + (Note: you may not change the extraction path for a given resource + manager once resources have been extracted, unless you first call + ``cleanup_resources()``.) + """ + if self.cached_files: + raise ValueError( + "Can't change extraction path, files already extracted" + ) + + self.extraction_path = path + + def cleanup_resources(self, force=False): + """ + Delete all extracted resource files and directories, returning a list + of the file and directory names that could not be successfully removed. + This function does not have any concurrency protection, so it should + generally only be called when the extraction path is a temporary + directory exclusive to a single process. 
This method is not + automatically called; you must call it explicitly or register it as an + ``atexit`` function if you wish to ensure cleanup of a temporary + directory used for extractions. + """ + # XXX + + + +def get_default_cache(): + """Determine the default cache location + + This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. + Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the + "Application Data" directory. On all other systems, it's "~/.python-eggs". + """ + try: + return os.environ['PYTHON_EGG_CACHE'] + except KeyError: + pass + + if os.name!='nt': + return os.path.expanduser('~/.python-eggs') + + app_data = 'Application Data' # XXX this may be locale-specific! + app_homes = [ + (('APPDATA',), None), # best option, should be locale-safe + (('USERPROFILE',), app_data), + (('HOMEDRIVE','HOMEPATH'), app_data), + (('HOMEPATH',), app_data), + (('HOME',), None), + (('WINDIR',), app_data), # 95/98/ME + ] + + for keys, subdir in app_homes: + dirname = '' + for key in keys: + if key in os.environ: + dirname = os.path.join(dirname, os.environ[key]) + else: + break + else: + if subdir: + dirname = os.path.join(dirname,subdir) + return os.path.join(dirname, 'Python-Eggs') + else: + raise RuntimeError( + "Please set the PYTHON_EGG_CACHE enviroment variable" + ) + +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ + return re.sub('[^A-Za-z0-9.]+', '-', name) + + +def safe_version(version): + """Convert an arbitrary string to a standard version string + + Spaces become dots, and all other non-alphanumeric characters become + dashes, with runs of multiple dashes condensed to a single dash. + """ + version = version.replace(' ','.') + return re.sub('[^A-Za-z0-9.]+', '-', version) + + +def safe_extra(extra): + """Convert an arbitrary string to a standard 'extra' name + + Any runs of non-alphanumeric characters are replaced with a single '_', + and the result is always lowercased. + """ + return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() + + +def to_filename(name): + """Convert a project or version name to its filename-escaped form + + Any '-' characters are currently replaced with '_'. 
+ """ + return name.replace('-','_') + + + + + + + + +class NullProvider: + """Try to implement resources and metadata for arbitrary PEP 302 loaders""" + + egg_name = None + egg_info = None + loader = None + + def __init__(self, module): + self.loader = getattr(module, '__loader__', None) + self.module_path = os.path.dirname(getattr(module, '__file__', '')) + + def get_resource_filename(self, manager, resource_name): + return self._fn(self.module_path, resource_name) + + def get_resource_stream(self, manager, resource_name): + return StringIO(self.get_resource_string(manager, resource_name)) + + def get_resource_string(self, manager, resource_name): + return self._get(self._fn(self.module_path, resource_name)) + + def has_resource(self, resource_name): + return self._has(self._fn(self.module_path, resource_name)) + + def has_metadata(self, name): + return self.egg_info and self._has(self._fn(self.egg_info,name)) + + if sys.version_info <= (3,): + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info,name)) + else: + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info,name)).decode("utf-8") + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + def resource_isdir(self,resource_name): + return self._isdir(self._fn(self.module_path, resource_name)) + + def metadata_isdir(self,name): + return self.egg_info and self._isdir(self._fn(self.egg_info,name)) + + + def resource_listdir(self,resource_name): + return self._listdir(self._fn(self.module_path,resource_name)) + + def metadata_listdir(self,name): + if self.egg_info: + return self._listdir(self._fn(self.egg_info,name)) + return [] + + def run_script(self,script_name,namespace): + script = 'scripts/'+script_name + if not self.has_metadata(script): + raise ResolutionError("No script named %r" % script_name) + script_text = self.get_metadata(script).replace('\r\n','\n') + script_text = script_text.replace('\r','\n') + script_filename = self._fn(self.egg_info,script) + namespace['__file__'] = script_filename + if os.path.exists(script_filename): + exec(compile(open(script_filename).read(), script_filename, 'exec'), namespace, namespace) + else: + from linecache import cache + cache[script_filename] = ( + len(script_text), 0, script_text.split('\n'), script_filename + ) + script_code = compile(script_text,script_filename,'exec') + exec(script_code, namespace, namespace) + + def _has(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _isdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _listdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _fn(self, base, resource_name): + if resource_name: + return os.path.join(base, *resource_name.split('/')) + return base + + def _get(self, path): + if hasattr(self.loader, 'get_data'): + return self.loader.get_data(path) + raise NotImplementedError( + "Can't perform this operation for loaders without 'get_data()'" + ) + +register_loader_type(object, NullProvider) + + +class EggProvider(NullProvider): + """Provider based on a virtual filesystem""" + + def __init__(self,module): + NullProvider.__init__(self,module) + self._setup_prefix() + + def _setup_prefix(self): + # we assume here that our metadata may be nested inside a "basket" + # of multiple eggs; that's why we 
use module_path instead of .archive + path = self.module_path + old = None + while path!=old: + if path.lower().endswith('.egg'): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 'EGG-INFO') + self.egg_root = path + break + old = path + path, base = os.path.split(path) + + + + + + +class DefaultProvider(EggProvider): + """Provides access to package resources in the filesystem""" + + def _has(self, path): + return os.path.exists(path) + + def _isdir(self,path): + return os.path.isdir(path) + + def _listdir(self,path): + return os.listdir(path) + + def get_resource_stream(self, manager, resource_name): + return open(self._fn(self.module_path, resource_name), 'rb') + + def _get(self, path): + stream = open(path, 'rb') + try: + return stream.read() + finally: + stream.close() + +register_loader_type(type(None), DefaultProvider) + +try: + # CPython >=3.3 + import _frozen_importlib +except ImportError: + pass +else: + register_loader_type(_frozen_importlib.SourceFileLoader, DefaultProvider) + + +class EmptyProvider(NullProvider): + """Provider that returns nothing for all requests""" + + _isdir = _has = lambda self,path: False + _get = lambda self,path: '' + _listdir = lambda self,path: [] + module_path = None + + def __init__(self): + pass + +empty_provider = EmptyProvider() + + + + +class ZipProvider(EggProvider): + """Resource support for zips and eggs""" + + eagers = None + + def __init__(self, module): + EggProvider.__init__(self,module) + self.zipinfo = zipimport._zip_directory_cache[self.loader.archive] + self.zip_pre = self.loader.archive+os.sep + + def _zipinfo_name(self, fspath): + # Convert a virtual filename (full path to file) into a zipfile subpath + # usable with the zipimport directory cache for our target archive + if fspath.startswith(self.zip_pre): + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath,self.zip_pre) + ) + + def _parts(self,zip_path): + # Convert a zipfile subpath into an egg-relative path part list + fspath = self.zip_pre+zip_path # pseudo-fs path + if fspath.startswith(self.egg_root+os.sep): + return fspath[len(self.egg_root)+1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath,self.egg_root) + ) + + def get_resource_filename(self, manager, resource_name): + if not self.egg_name: + raise NotImplementedError( + "resource_filename() only supported for .egg, not .zip" + ) + # no need to lock for extraction, since we use temp names + zip_path = self._resource_to_zip(resource_name) + eagers = self._get_eager_resources() + if '/'.join(self._parts(zip_path)) in eagers: + for name in eagers: + self._extract_resource(manager, self._eager_to_zip(name)) + return self._extract_resource(manager, zip_path) + + def _extract_resource(self, manager, zip_path): + + if zip_path in self._index(): + for name in self._index()[zip_path]: + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) + return os.path.dirname(last) # return the extracted directory name + + zip_stat = self.zipinfo[zip_path] + t,d,size = zip_stat[5], zip_stat[6], zip_stat[3] + date_time = ( + (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd + (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc. 
+ ) + timestamp = time.mktime(date_time) + + try: + if not WRITE_SUPPORT: + raise IOError('"os.rename" and "os.unlink" are not supported ' + 'on this platform') + + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) + + if os.path.isfile(real_path): + stat = os.stat(real_path) + if stat.st_size==size and stat.st_mtime==timestamp: + # size and stamp match, don't bother extracting + return real_path + + outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) + os.write(outf, self.loader.get_data(zip_path)) + os.close(outf) + utime(tmpnam, (timestamp,timestamp)) + manager.postprocess(tmpnam, real_path) + + try: + rename(tmpnam, real_path) + + except os.error: + if os.path.isfile(real_path): + stat = os.stat(real_path) + + if stat.st_size==size and stat.st_mtime==timestamp: + # size and stamp match, somebody did it just ahead of + # us, so we're done + return real_path + elif os.name=='nt': # Windows, del old file and retry + unlink(real_path) + rename(tmpnam, real_path) + return real_path + raise + + except os.error: + manager.extraction_error() # report a user-friendly error + + return real_path + + def _get_eager_resources(self): + if self.eagers is None: + eagers = [] + for name in ('native_libs.txt', 'eager_resources.txt'): + if self.has_metadata(name): + eagers.extend(self.get_metadata_lines(name)) + self.eagers = eagers + return self.eagers + + def _index(self): + try: + return self._dirindex + except AttributeError: + ind = {} + for path in self.zipinfo: + parts = path.split(os.sep) + while parts: + parent = os.sep.join(parts[:-1]) + if parent in ind: + ind[parent].append(parts[-1]) + break + else: + ind[parent] = [parts.pop()] + self._dirindex = ind + return ind + + def _has(self, fspath): + zip_path = self._zipinfo_name(fspath) + return zip_path in self.zipinfo or zip_path in self._index() + + def _isdir(self,fspath): + return self._zipinfo_name(fspath) in self._index() + + def _listdir(self,fspath): + return list(self._index().get(self._zipinfo_name(fspath), ())) + + def _eager_to_zip(self,resource_name): + return self._zipinfo_name(self._fn(self.egg_root,resource_name)) + + def _resource_to_zip(self,resource_name): + return self._zipinfo_name(self._fn(self.module_path,resource_name)) + +register_loader_type(zipimport.zipimporter, ZipProvider) + + + + + + + + + + + + + + + + + + + + + + + + +class FileMetadata(EmptyProvider): + """Metadata handler for standalone PKG-INFO files + + Usage:: + + metadata = FileMetadata("/path/to/PKG-INFO") + + This provider rejects all data and metadata requests except for PKG-INFO, + which is treated as existing, and will be the contents of the file at + the provided location. 
+ """ + + def __init__(self,path): + self.path = path + + def has_metadata(self,name): + return name=='PKG-INFO' + + def get_metadata(self,name): + if name=='PKG-INFO': + f = open(self.path,'rU') + metadata = f.read() + f.close() + return metadata + raise KeyError("No metadata except PKG-INFO is available") + + def get_metadata_lines(self,name): + return yield_lines(self.get_metadata(name)) + + + + + + + + + + + + + + + + +class PathMetadata(DefaultProvider): + """Metadata provider for egg directories + + Usage:: + + # Development eggs: + + egg_info = "/path/to/PackageName.egg-info" + base_dir = os.path.dirname(egg_info) + metadata = PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + dist = Distribution(basedir,project_name=dist_name,metadata=metadata) + + # Unpacked egg directories: + + egg_path = "/path/to/PackageName-ver-pyver-etc.egg" + metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) + dist = Distribution.from_filename(egg_path, metadata=metadata) + """ + + def __init__(self, path, egg_info): + self.module_path = path + self.egg_info = egg_info + + +class EggMetadata(ZipProvider): + """Metadata provider for .egg files""" + + def __init__(self, importer): + """Create a metadata provider from a zipimporter""" + + self.zipinfo = zipimport._zip_directory_cache[importer.archive] + self.zip_pre = importer.archive+os.sep + self.loader = importer + if importer.prefix: + self.module_path = os.path.join(importer.archive, importer.prefix) + else: + self.module_path = importer.archive + self._setup_prefix() + + +class ImpWrapper: + """PEP 302 Importer that wraps Python's "normal" import algorithm""" + + def __init__(self, path=None): + self.path = path + + def find_module(self, fullname, path=None): + subname = fullname.split(".")[-1] + if subname != fullname and self.path is None: + return None + if self.path is None: + path = None + else: + path = [self.path] + try: + file, filename, etc = imp.find_module(subname, path) + except ImportError: + return None + return ImpLoader(file, filename, etc) + + +class ImpLoader: + """PEP 302 Loader that wraps Python's "normal" import algorithm""" + + def __init__(self, file, filename, etc): + self.file = file + self.filename = filename + self.etc = etc + + def load_module(self, fullname): + try: + mod = imp.load_module(fullname, self.file, self.filename, self.etc) + finally: + if self.file: self.file.close() + # Note: we don't set __loader__ because we want the module to look + # normal; i.e. this is just a wrapper for standard import machinery + return mod + + + + +def get_importer(path_item): + """Retrieve a PEP 302 "importer" for the given path item + + If there is no importer, this returns a wrapper around the builtin import + machinery. The returned importer is only cached if it was created by a + path hook. 
+ """ + try: + importer = sys.path_importer_cache[path_item] + except KeyError: + for hook in sys.path_hooks: + try: + importer = hook(path_item) + except ImportError: + pass + else: + break + else: + importer = None + + sys.path_importer_cache.setdefault(path_item,importer) + if importer is None: + try: + importer = ImpWrapper(path_item) + except ImportError: + pass + return importer + +try: + from pkgutil import get_importer, ImpImporter +except ImportError: + pass # Python 2.3 or 2.4, use our own implementation +else: + ImpWrapper = ImpImporter # Python 2.5, use pkgutil's implementation + del ImpLoader, ImpImporter + + + + + + +_declare_state('dict', _distribution_finders = {}) + +def register_finder(importer_type, distribution_finder): + """Register `distribution_finder` to find distributions in sys.path items + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `distribution_finder` is a callable that, passed a path + item and the importer instance, yields ``Distribution`` instances found on + that path item. See ``pkg_resources.find_on_path`` for an example.""" + _distribution_finders[importer_type] = distribution_finder + + +def find_distributions(path_item, only=False): + """Yield distributions accessible via `path_item`""" + importer = get_importer(path_item) + finder = _find_adapter(_distribution_finders, importer) + return finder(importer, path_item, only) + +def find_in_zip(importer, path_item, only=False): + metadata = EggMetadata(importer) + if metadata.has_metadata('PKG-INFO'): + yield Distribution.from_filename(path_item, metadata=metadata) + if only: + return # don't yield nested distros + for subitem in metadata.resource_listdir('/'): + if subitem.endswith('.egg'): + subpath = os.path.join(path_item, subitem) + for dist in find_in_zip(zipimport.zipimporter(subpath), subpath): + yield dist + +register_finder(zipimport.zipimporter, find_in_zip) + +def StringIO(*args, **kw): + """Thunk to load the real StringIO on demand""" + global StringIO + try: + from io import StringIO + except ImportError: + from io import StringIO + return StringIO(*args,**kw) + +def find_nothing(importer, path_item, only=False): + return () +register_finder(object,find_nothing) + +def find_on_path(importer, path_item, only=False): + """Yield distributions accessible on a sys.path directory""" + path_item = _normalize_cached(path_item) + + if os.path.isdir(path_item) and os.access(path_item, os.R_OK): + if path_item.lower().endswith('.egg'): + # unpacked egg + yield Distribution.from_filename( + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item,'EGG-INFO') + ) + ) + else: + # scan for .egg and .egg-info in directory + for entry in os.listdir(path_item): + lower = entry.lower() + if lower.endswith('.egg-info') or lower.endswith('.dist-info'): + fullpath = os.path.join(path_item, entry) + if os.path.isdir(fullpath): + # egg-info directory, allow getting metadata + metadata = PathMetadata(path_item, fullpath) + else: + metadata = FileMetadata(fullpath) + yield Distribution.from_location( + path_item,entry,metadata,precedence=DEVELOP_DIST + ) + elif not only and lower.endswith('.egg'): + for dist in find_distributions(os.path.join(path_item, entry)): + yield dist + elif not only and lower.endswith('.egg-link'): + for line in open(os.path.join(path_item, entry)): + if not line.strip(): continue + for item in find_distributions(os.path.join(path_item,line.rstrip())): + yield item + break +register_finder(ImpWrapper,find_on_path) + +try: + # 
CPython >=3.3 + import _frozen_importlib +except ImportError: + pass +else: + register_finder(_frozen_importlib.FileFinder, find_on_path) + +_declare_state('dict', _namespace_handlers={}) +_declare_state('dict', _namespace_packages={}) + + +def register_namespace_handler(importer_type, namespace_handler): + """Register `namespace_handler` to declare namespace packages + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `namespace_handler` is a callable like this:: + + def namespace_handler(importer,path_entry,moduleName,module): + # return a path_entry to use for child packages + + Namespace handlers are only called if the importer object has already + agreed that it can handle the relevant path item, and they should only + return a subpath if the module __path__ does not already contain an + equivalent subpath. For an example namespace handler, see + ``pkg_resources.file_ns_handler``. + """ + _namespace_handlers[importer_type] = namespace_handler + +def _handle_ns(packageName, path_item): + """Ensure that named package includes a subpath of path_item (if needed)""" + importer = get_importer(path_item) + if importer is None: + return None + loader = importer.find_module(packageName) + if loader is None: + return None + module = sys.modules.get(packageName) + if module is None: + module = sys.modules[packageName] = types.ModuleType(packageName) + module.__path__ = []; _set_parent_ns(packageName) + elif not hasattr(module,'__path__'): + raise TypeError("Not a package:", packageName) + handler = _find_adapter(_namespace_handlers, importer) + subpath = handler(importer,path_item,packageName,module) + if subpath is not None: + path = module.__path__; path.append(subpath) + loader.load_module(packageName); module.__path__ = path + return subpath + +def declare_namespace(packageName): + """Declare that package 'packageName' is a namespace package""" + + imp.acquire_lock() + try: + if packageName in _namespace_packages: + return + + path, parent = sys.path, None + if '.' 
in packageName:
+            parent = '.'.join(packageName.split('.')[:-1])
+            declare_namespace(parent)
+            if parent not in _namespace_packages:
+                __import__(parent)
+                try:
+                    path = sys.modules[parent].__path__
+                except AttributeError:
+                    raise TypeError("Not a package:", parent)
+
+        # Track what packages are namespaces, so when new path items are added,
+        # they can be updated
+        _namespace_packages.setdefault(parent,[]).append(packageName)
+        _namespace_packages.setdefault(packageName,[])
+
+        for path_item in path:
+            # Ensure all the parent's path items are reflected in the child,
+            # if they apply
+            _handle_ns(packageName, path_item)
+
+    finally:
+        imp.release_lock()
+
+def fixup_namespace_packages(path_item, parent=None):
+    """Ensure that previously-declared namespace packages include path_item"""
+    imp.acquire_lock()
+    try:
+        for package in _namespace_packages.get(parent,()):
+            subpath = _handle_ns(package, path_item)
+            if subpath: fixup_namespace_packages(subpath,package)
+    finally:
+        imp.release_lock()
+
+def file_ns_handler(importer, path_item, packageName, module):
+    """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+    subpath = os.path.join(path_item, packageName.split('.')[-1])
+    normalized = _normalize_cached(subpath)
+    for item in module.__path__:
+        if _normalize_cached(item)==normalized:
+            break
+    else:
+        # Only return the path if it's not already there
+        return subpath
+
+register_namespace_handler(ImpWrapper,file_ns_handler)
+register_namespace_handler(zipimport.zipimporter,file_ns_handler)
+
+try:
+    # CPython >=3.3
+    import _frozen_importlib
+except ImportError:
+    pass
+else:
+    register_namespace_handler(_frozen_importlib.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+    return None
+
+register_namespace_handler(object,null_ns_handler)
+
+
+def normalize_path(filename):
+    """Normalize a file/dir name for comparison purposes"""
+    return os.path.normcase(os.path.realpath(filename))
+
+def _normalize_cached(filename,_cache={}):
+    try:
+        return _cache[filename]
+    except KeyError:
+        _cache[filename] = result = normalize_path(filename)
+        return result
+
+def _set_parent_ns(packageName):
+    parts = packageName.split('.')
+    name = parts.pop()
+    if parts:
+        parent = '.'.join(parts)
+        setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+def yield_lines(strs):
+    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
+    if isinstance(strs,str):
+        for s in strs.splitlines():
+            s = s.strip()
+            if s and not s.startswith('#'): # skip blank lines/comments
+                yield s
+    else:
+        for ss in strs:
+            for s in yield_lines(ss):
+                yield s
+
+LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
+CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
+DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
+VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
+COMMA = re.compile(r"\s*,").match # comma between items
+OBRACKET = re.compile(r"\s*\[").match
+CBRACKET = re.compile(r"\s*\]").match
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"(?P<name>[^-]+)"
+    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? 
)?", + re.VERBOSE | re.IGNORECASE +).match + +component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) +replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get + +def _parse_version_parts(s): + for part in component_re.split(s): + part = replace(part,part) + if part in ['', '.']: + continue + if part[:1] in '0123456789': + yield part.zfill(8) # pad for numeric comparison + else: + yield '*'+part + + yield '*final' # ensure that alpha/beta/candidate are before final + +def parse_version(s): + """Convert a version string to a chronologically-sortable key + + This is a rough cross between distutils' StrictVersion and LooseVersion; + if you give it versions that would work with StrictVersion, then it behaves + the same; otherwise it acts like a slightly-smarter LooseVersion. It is + *possible* to create pathological version coding schemes that will fool + this parser, but they should be very rare in practice. + + The returned value will be a tuple of strings. Numeric portions of the + version are padded to 8 digits so they will compare numerically, but + without relying on how numbers compare relative to strings. Dots are + dropped, but dashes are retained. Trailing zeros between alpha segments + or dashes are suppressed, so that e.g. "2.4.0" is considered the same as + "2.4". Alphanumeric parts are lower-cased. + + The algorithm assumes that strings like "-" and any alpha string that + alphabetically follows "final" represents a "patch level". So, "2.4-1" + is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is + considered newer than "2.4-1", which in turn is newer than "2.4". + + Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that + come before "final" alphabetically) are assumed to be pre-release versions, + so that the version "2.4" is considered newer than "2.4a1". + + Finally, to handle miscellaneous cases, the strings "pre", "preview", and + "rc" are treated as if they were "c", i.e. as though they were release + candidates, and therefore are not as new as a version string that does not + contain them, and "dev" is replaced with an '@' so that it sorts lower than + than any other pre-release tag. 
+ """ + parts = [] + for part in _parse_version_parts(s.lower()): + if part.startswith('*'): + # remove trailing zeros from each series of numeric parts + while parts and parts[-1]=='00000000': + parts.pop() + parts.append(part) + return tuple(parts) + +class EntryPoint(object): + """Object representing an advertised importable object""" + + def __init__(self, name, module_name, attrs=(), extras=(), dist=None): + if not MODULE(module_name): + raise ValueError("Invalid module name", module_name) + self.name = name + self.module_name = module_name + self.attrs = tuple(attrs) + self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras + self.dist = dist + + def __str__(self): + s = "%s = %s" % (self.name, self.module_name) + if self.attrs: + s += ':' + '.'.join(self.attrs) + if self.extras: + s += ' [%s]' % ','.join(self.extras) + return s + + def __repr__(self): + return "EntryPoint.parse(%r)" % str(self) + + def load(self, require=True, env=None, installer=None): + if require: self.require(env, installer) + entry = __import__(self.module_name, globals(),globals(), ['__name__']) + for attr in self.attrs: + try: + entry = getattr(entry,attr) + except AttributeError: + raise ImportError("%r has no %r attribute" % (entry,attr)) + return entry + + def require(self, env=None, installer=None): + if self.extras and not self.dist: + raise UnknownExtra("Can't require() without a distribution", self) + list(map(working_set.add, + working_set.resolve(self.dist.requires(self.extras),env,installer))) + + + + #@classmethod + def parse(cls, src, dist=None): + """Parse a single entry point from string `src` + + Entry point syntax follows the form:: + + name = some.module:some.attr [extra1,extra2] + + The entry name and module name are required, but the ``:attrs`` and + ``[extras]`` parts are optional + """ + try: + attrs = extras = () + name,value = src.split('=',1) + if '[' in value: + value,extras = value.split('[',1) + req = Requirement.parse("x["+extras) + if req.specs: raise ValueError + extras = req.extras + if ':' in value: + value,attrs = value.split(':',1) + if not MODULE(attrs.rstrip()): + raise ValueError + attrs = attrs.rstrip().split('.') + except ValueError: + raise ValueError( + "EntryPoint must be in 'name=module:attrs [extras]' format", + src + ) + else: + return cls(name.strip(), value.strip(), attrs, extras, dist) + + parse = classmethod(parse) + + + + + + + + + #@classmethod + def parse_group(cls, group, lines, dist=None): + """Parse an entry point group""" + if not MODULE(group): + raise ValueError("Invalid group name", group) + this = {} + for line in yield_lines(lines): + ep = cls.parse(line, dist) + if ep.name in this: + raise ValueError("Duplicate entry point", group, ep.name) + this[ep.name]=ep + return this + + parse_group = classmethod(parse_group) + + #@classmethod + def parse_map(cls, data, dist=None): + """Parse a map of entry point groups""" + if isinstance(data,dict): + data = list(data.items()) + else: + data = split_sections(data) + maps = {} + for group, lines in data: + if group is None: + if not lines: + continue + raise ValueError("Entry points must be listed in groups") + group = group.strip() + if group in maps: + raise ValueError("Duplicate group name", group) + maps[group] = cls.parse_group(group, lines, dist) + return maps + + parse_map = classmethod(parse_map) + + +def _remove_md5_fragment(location): + if not location: + return '' + parsed = urlparse(location) + if parsed[-1].startswith('md5='): + return urlunparse(parsed[:-1] + ('',)) + return 
location + + +class Distribution(object): + """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' + + def __init__(self, + location=None, metadata=None, project_name=None, version=None, + py_version=PY_MAJOR, platform=None, precedence = EGG_DIST + ): + self.project_name = safe_name(project_name or 'Unknown') + if version is not None: + self._version = safe_version(version) + self.py_version = py_version + self.platform = platform + self.location = location + self.precedence = precedence + self._provider = metadata or empty_provider + + #@classmethod + def from_location(cls,location,basename,metadata=None,**kw): + project_name, version, py_version, platform = [None]*4 + basename, ext = os.path.splitext(basename) + if ext.lower() in _distributionImpl: + # .dist-info gets much metadata differently + match = EGG_NAME(basename) + if match: + project_name, version, py_version, platform = match.group( + 'name','ver','pyver','plat' + ) + cls = _distributionImpl[ext.lower()] + return cls( + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw + ) + from_location = classmethod(from_location) + + + hashcmp = property( + lambda self: ( + getattr(self,'parsed_version',()), + self.precedence, + self.key, + _remove_md5_fragment(self.location), + self.py_version, + self.platform + ) + ) + def __hash__(self): return hash(self.hashcmp) + def __lt__(self, other): + return self.hashcmp < other.hashcmp + def __le__(self, other): + return self.hashcmp <= other.hashcmp + def __gt__(self, other): + return self.hashcmp > other.hashcmp + def __ge__(self, other): + return self.hashcmp >= other.hashcmp + def __eq__(self, other): + if not isinstance(other, self.__class__): + # It's not a Distribution, so they are not equal + return False + return self.hashcmp == other.hashcmp + def __ne__(self, other): + return not self == other + + # These properties have to be lazy so that we don't have to load any + # metadata until/unless it's actually needed. 
(i.e., some distributions + # may not know their name or version without loading PKG-INFO) + + #@property + def key(self): + try: + return self._key + except AttributeError: + self._key = key = self.project_name.lower() + return key + key = property(key) + + #@property + def parsed_version(self): + try: + return self._parsed_version + except AttributeError: + self._parsed_version = pv = parse_version(self.version) + return pv + + parsed_version = property(parsed_version) + + #@property + def version(self): + try: + return self._version + except AttributeError: + for line in self._get_metadata(self.PKG_INFO): + if line.lower().startswith('version:'): + self._version = safe_version(line.split(':',1)[1].strip()) + return self._version + else: + raise ValueError( + "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self + ) + version = property(version) + + + + + #@property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + dm = self.__dep_map = {None: []} + for name in 'requires.txt', 'depends.txt': + for extra,reqs in split_sections(self._get_metadata(name)): + if extra: extra = safe_extra(extra) + dm.setdefault(extra,[]).extend(parse_requirements(reqs)) + return dm + _dep_map = property(_dep_map) + + def requires(self,extras=()): + """List of Requirements needed for this distro if `extras` are used""" + dm = self._dep_map + deps = [] + deps.extend(dm.get(None,())) + for ext in extras: + try: + deps.extend(dm[safe_extra(ext)]) + except KeyError: + raise UnknownExtra( + "%s has no such extra feature %r" % (self, ext) + ) + return deps + + def _get_metadata(self,name): + if self.has_metadata(name): + for line in self.get_metadata_lines(name): + yield line + + def activate(self,path=None): + """Ensure distribution is importable on `path` (default=sys.path)""" + if path is None: path = sys.path + self.insert_on(path) + if path is sys.path: + fixup_namespace_packages(self.location) + list(map(declare_namespace, self._get_metadata('namespace_packages.txt'))) + + + def egg_name(self): + """Return what this distribution's standard .egg filename should be""" + filename = "%s-%s-py%s" % ( + to_filename(self.project_name), to_filename(self.version), + self.py_version or PY_MAJOR + ) + + if self.platform: + filename += '-'+self.platform + return filename + + def __repr__(self): + if self.location: + return "%s (%s)" % (self,self.location) + else: + return str(self) + + def __str__(self): + try: version = getattr(self,'version',None) + except ValueError: version = None + version = version or "[unknown version]" + return "%s %s" % (self.project_name,version) + + def __getattr__(self,attr): + """Delegate all unrecognized public attributes to .metadata provider""" + if attr.startswith('_'): + raise AttributeError(attr) + return getattr(self._provider, attr) + + #@classmethod + def from_filename(cls,filename,metadata=None, **kw): + return cls.from_location( + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw + ) + from_filename = classmethod(from_filename) + + def as_requirement(self): + """Return a ``Requirement`` that matches this distribution exactly""" + return Requirement.parse('%s==%s' % (self.project_name, self.version)) + + def load_entry_point(self, group, name): + """Return the `name` entry point of `group` or raise ImportError""" + ep = self.get_entry_info(group,name) + if ep is None: + raise ImportError("Entry point %r not found" % ((group,name),)) + return ep.load() + + def get_entry_map(self, group=None): + """Return the entry point 
map for `group`, or the full entry map""" + try: + ep_map = self._ep_map + except AttributeError: + ep_map = self._ep_map = EntryPoint.parse_map( + self._get_metadata('entry_points.txt'), self + ) + if group is not None: + return ep_map.get(group,{}) + return ep_map + + def get_entry_info(self, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return self.get_entry_map(group).get(name) + + + + + + + + + + + + + + + + + + + + def insert_on(self, path, loc = None): + """Insert self.location in path before its nearest parent directory""" + + loc = loc or self.location + + if self.project_name == 'setuptools': + try: + version = self.version + except ValueError: + version = '' + if '0.7' in version: + raise ValueError( + "A 0.7-series setuptools cannot be installed " + "with distribute. Found one at %s" % str(self.location)) + + if not loc: + return + + if path is sys.path: + self.check_version_conflict() + + nloc = _normalize_cached(loc) + bdir = os.path.dirname(nloc) + npath= list(map(_normalize_cached, path)) + + bp = None + for p, item in enumerate(npath): + if item==nloc: + break + elif item==bdir and self.precedence==EGG_DIST: + # if it's an .egg, give it precedence over its directory + path.insert(p, loc) + npath.insert(p, nloc) + break + else: + path.append(loc) + return + + # p is the spot where we found or inserted loc; now remove duplicates + while 1: + try: + np = npath.index(nloc, p+1) + except ValueError: + break + else: + del npath[np], path[np] + p = np # ha! + + return + + + + def check_version_conflict(self): + if self.key=='distribute': + return # ignore the inevitable setuptools self-conflicts :( + + nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) + loc = normalize_path(self.location) + for modname in self._get_metadata('top_level.txt'): + if (modname not in sys.modules or modname in nsp + or modname in _namespace_packages + ): + continue + if modname in ('pkg_resources', 'setuptools', 'site'): + continue + fn = getattr(sys.modules[modname], '__file__', None) + if fn and (normalize_path(fn).startswith(loc) or + fn.startswith(self.location)): + continue + issue_warning( + "Module %s was already imported from %s, but %s is being added" + " to sys.path" % (modname, fn, self.location), + ) + + def has_version(self): + try: + self.version + except ValueError: + issue_warning("Unbuilt egg for "+repr(self)) + return False + return True + + def clone(self,**kw): + """Copy this distribution, substituting in any changed keyword args""" + for attr in ( + 'project_name', 'version', 'py_version', 'platform', 'location', + 'precedence' + ): + kw.setdefault(attr, getattr(self,attr,None)) + kw.setdefault('metadata', self._provider) + return self.__class__(**kw) + + + + + #@property + def extras(self): + return [dep for dep in self._dep_map if dep] + extras = property(extras) + + +class DistInfoDistribution(Distribution): + """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" + PKG_INFO = 'METADATA' + EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") + + @property + def _parsed_pkg_info(self): + """Parse and cache metadata""" + try: + return self._pkg_info + except AttributeError: + from email.parser import Parser + self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO)) + return self._pkg_info + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + self.__dep_map = self._compute_dependencies() + return self.__dep_map + + def _preparse_requirement(self, 
requires_dist): + """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz') + Split environment marker, add == prefix to version specifiers as + necessary, and remove parenthesis. + """ + parts = requires_dist.split(';', 1) + [''] + distvers = parts[0].strip() + mark = parts[1].strip() + distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers) + distvers = distvers.replace('(', '').replace(')', '') + return (distvers, mark) + + def _compute_dependencies(self): + """Recompute this distribution's dependencies.""" + def dummy_marker(marker): + def marker_fn(environment=None, override=None): + return True + marker_fn.__doc__ = marker + return marker_fn + try: + from markerlib import as_function + except ImportError: + as_function = dummy_marker + dm = self.__dep_map = {None: []} + + reqs = [] + # Including any condition expressions + for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: + distvers, mark = self._preparse_requirement(req) + parsed = next(parse_requirements(distvers)) + parsed.marker_fn = as_function(mark) + reqs.append(parsed) + + def reqs_for_extra(extra): + for req in reqs: + if req.marker_fn(override={'extra':extra}): + yield req + + common = set(reqs_for_extra(None)) + dm[None].extend(common) + + for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: + extra = safe_extra(extra.strip()) + dm[extra] = list(set(reqs_for_extra(extra)) - common) + + return dm + + +_distributionImpl = {'.egg': Distribution, + '.egg-info': Distribution, + '.dist-info': DistInfoDistribution } + + +def issue_warning(*args,**kw): + level = 1 + g = globals() + try: + # find the first stack frame that is *not* code in + # the pkg_resources module, to use for the warning + while sys._getframe(level).f_globals is g: + level += 1 + except ValueError: + pass + from warnings import warn + warn(stacklevel = level+1, *args, **kw) + + + + + + + + + + + + + + + + + + + + + + + +def parse_requirements(strs): + """Yield ``Requirement`` objects for each specification in `strs` + + `strs` must be an instance of ``basestring``, or a (possibly-nested) + iterable thereof. 
+ """ + # create a steppable iterator, so we can handle \-continuations + lines = iter(yield_lines(strs)) + + def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): + + items = [] + + while not TERMINATOR(line,p): + if CONTINUE(line,p): + try: + line = next(lines); p = 0 + except StopIteration: + raise ValueError( + "\\ must not appear on the last nonblank line" + ) + + match = ITEM(line,p) + if not match: + raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) + + items.append(match.group(*groups)) + p = match.end() + + match = COMMA(line,p) + if match: + p = match.end() # skip the comma + elif not TERMINATOR(line,p): + raise ValueError( + "Expected ',' or end-of-list in",line,"at",line[p:] + ) + + match = TERMINATOR(line,p) + if match: p = match.end() # skip the terminator, if any + return line, p, items + + for line in lines: + match = DISTRO(line) + if not match: + raise ValueError("Missing distribution spec", line) + project_name = match.group(1) + p = match.end() + extras = [] + + match = OBRACKET(line,p) + if match: + p = match.end() + line, p, extras = scan_list( + DISTRO, CBRACKET, line, p, (1,), "'extra' name" + ) + + line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") + specs = [(op,safe_version(val)) for op,val in specs] + yield Requirement(project_name, specs, extras) + + +def _sort_dists(dists): + tmp = [(dist.hashcmp,dist) for dist in dists] + tmp.sort() + dists[::-1] = [d for hc,d in tmp] + + + + + + + + + + + + + + + + + +class Requirement: + def __init__(self, project_name, specs, extras): + """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" + self.unsafe_name, project_name = project_name, safe_name(project_name) + self.project_name, self.key = project_name, project_name.lower() + index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] + index.sort() + self.specs = [(op,ver) for parsed,trans,op,ver in index] + self.index, self.extras = index, tuple(map(safe_extra,extras)) + self.hashCmp = ( + self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), + frozenset(self.extras) + ) + self.__hash = hash(self.hashCmp) + + def __str__(self): + specs = ','.join([''.join(s) for s in self.specs]) + extras = ','.join(self.extras) + if extras: extras = '[%s]' % extras + return '%s%s%s' % (self.project_name, extras, specs) + + def __eq__(self,other): + return isinstance(other,Requirement) and self.hashCmp==other.hashCmp + + def __contains__(self,item): + if isinstance(item,Distribution): + if item.key != self.key: return False + if self.index: item = item.parsed_version # only get if we need it + elif isinstance(item,str): + item = parse_version(item) + last = None + compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1 + for parsed,trans,op,ver in self.index: + action = trans[compare(item,parsed)] # Indexing: 0, 1, -1 + if action=='F': return False + elif action=='T': return True + elif action=='+': last = True + elif action=='-' or last is None: last = False + if last is None: last = True # no rules encountered + return last + + + def __hash__(self): + return self.__hash + + def __repr__(self): return "Requirement.parse(%r)" % str(self) + + #@staticmethod + def parse(s, replacement=True): + reqs = list(parse_requirements(s)) + if reqs: + if len(reqs) == 1: + founded_req = reqs[0] + # if asked for setuptools distribution + # and if distribute is installed, we want to give + # distribute instead + if _override_setuptools(founded_req) and replacement: + distribute = list(parse_requirements('distribute')) + if 
len(distribute) == 1: + return distribute[0] + return founded_req + else: + return founded_req + + raise ValueError("Expected only one requirement", s) + raise ValueError("No requirements found", s) + + parse = staticmethod(parse) + +state_machine = { + # =>< + '<' : '--T', + '<=': 'T-T', + '>' : 'F+F', + '>=': 'T+F', + '==': 'T..', + '!=': 'F++', +} + + +def _override_setuptools(req): + """Return True when distribute wants to override a setuptools dependency. + + We want to override when the requirement is setuptools and the version is + a variant of 0.6. + + """ + if req.project_name == 'setuptools': + if not len(req.specs): + # Just setuptools: ok + return True + for comparator, version in req.specs: + if comparator in ['==', '>=', '>']: + if '0.7' in version: + # We want some setuptools not from the 0.6 series. + return False + return True + return False + + +def _get_mro(cls): + """Get an mro for a type or classic class""" + if not isinstance(cls,type): + class cls(cls,object): pass + return cls.__mro__[1:] + return cls.__mro__ + +def _find_adapter(registry, ob): + """Return an adapter factory for `ob` from `registry`""" + for t in _get_mro(getattr(ob, '__class__', type(ob))): + if t in registry: + return registry[t] + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def split_sections(s): + """Split a string or iterable thereof into (section,content) pairs + + Each ``section`` is a stripped version of the section header ("[section]") + and each ``content`` is a list of stripped lines excluding blank lines and + comment-only lines. If there are any such lines before the first section + header, they're returned in a first ``section`` of ``None``. + """ + section = None + content = [] + for line in yield_lines(s): + if line.startswith("["): + if line.endswith("]"): + if section or content: + yield section, content + section = line[1:-1].strip() + content = [] + else: + raise ValueError("Invalid section heading", line) + else: + content.append(line) + + # wrap up last segment + yield section, content + +def _mkstemp(*args,**kw): + from tempfile import mkstemp + old_open = os.open + try: + os.open = os_open # temporarily bypass sandboxing + return mkstemp(*args,**kw) + finally: + os.open = old_open # and then put it back + + +# Set up global resource manager (deliberately not state-saved) +_manager = ResourceManager() +def _initialize(g): + for name in dir(_manager): + if not name.startswith('_'): + g[name] = getattr(_manager, name) +_initialize(globals()) + +# Prepare the master working set and make the ``require()`` API available +_declare_state('object', working_set = WorkingSet()) + +try: + # Does the main program list any requirements? 
+ from __main__ import __requires__ +except ImportError: + pass # No: just use the default working set based on sys.path +else: + # Yes: ensure the requirements are met, by prefixing sys.path if necessary + try: + working_set.require(__requires__) + except VersionConflict: # try it without defaults already on sys.path + working_set = WorkingSet([]) # by starting with an empty path + for dist in working_set.resolve( + parse_requirements(__requires__), Environment() + ): + working_set.add(dist) + for entry in sys.path: # add any missing entries from sys.path + if entry not in working_set.entries: + working_set.add_entry(entry) + sys.path[:] = working_set.entries # then copy back to sys.path + +require = working_set.require +iter_entry_points = working_set.iter_entry_points +add_activation_listener = working_set.subscribe +run_script = working_set.run_script +run_main = run_script # backward compatibility +# Activate all distributions already on sys.path, and ensure that +# all distributions added to the working set in the future (e.g. by +# calling ``require()``) will get activated as well. +add_activation_listener(lambda dist: dist.activate()) +working_set.entries=[]; list(map(working_set.add_entry,sys.path)) # match order + diff --git a/lib3/Chameleon-2.9.2/tox.ini b/lib3/Chameleon-2.9.2/tox.ini new file mode 100644 --- /dev/null +++ b/lib3/Chameleon-2.9.2/tox.ini @@ -0,0 +1,53 @@ +[tox] +envlist = + py25,py26,py27,py32,pypy,cover + +[testenv:py25] +commands = + python setup.py test -q +deps = + ordereddict + unittest2 + distribute + +[testenv:py26] +commands = + python setup.py test -q +deps = + ordereddict + unittest2 + distribute + +[testenv:py27] +commands = + python setup.py test -q +deps = + distribute + +[testenv:py32] +commands = + python setup.py test -q +deps = + distribute + +[testenv:pypy] +commands = + python setup.py test -q +deps = + ordereddict + unittest2 + distribute + +[testenv:cover] +basepython = + python2.6 +commands = + python setup.py nosetests --with-xunit --with-xcoverage +deps = + nose + coverage==3.4 + nosexcover + ordereddict + unittest2 + distribute + diff --git a/lib3/Mako-0.7.3/CHANGES b/lib3/Mako-0.7.3/CHANGES new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/CHANGES @@ -0,0 +1,847 @@ + +0.7.3 +- [bug] legacy_html_escape function, used when + Markupsafe isn't installed, was using an inline-compiled + regexp which causes major slowdowns on Python 3.3; + is now precompiled. + +- [bug] AST supporting now supports tuple-packed + function arguments inside pure-python def + or lambda expressions. [ticket:201] + +- [bug] Fixed Py3K bug in the Babel extension. + +- [bug] Fixed the "filter" attribute of the + <%text> tag so that it pulls locally specified + identifiers from the context the same + way as that of <%block> and <%filter>. + +- [bug] Fixed bug in plugin loader to correctly + raise exception when non-existent plugin + is specified. + +0.7.2 +- [bug] Fixed regression in 0.7.1 where AST + parsing for Py2.4 was broken. + [ticket:193] + +0.7.1 +- [feature] Control lines with no bodies will + now succeed, as "pass" is added for these + when no statements are otherwise present. + Courtesy Ben Trofatter [ticket:146] + +- [bug] Fixed some long-broken scoping behavior + involving variables declared in defs and such, + which only became apparent when + the strict_undefined flag was turned on. + [ticket:192] + +- [bug] Can now use strict_undefined at the + same time args passed to def() are used + by other elements of the <%def> tag. 
+ [ticket:191] + +0.7.0 +- [feature] Added new "loop" variable to templates, + is provided within a % for block to provide + info about the loop such as index, first/last, + odd/even, etc. A migration path is also provided + for legacy templates via the "enable_loop" argument + available on Template, TemplateLookup, and <%page>. + Thanks to Ben Trofatter for all + the work on this [ticket:125] + +- [feature] Added a real check for "reserved" + names, that is names which are never pulled + from the context and cannot be passed to + the template.render() method. Current names + are "context", "loop", "UNDEFINED". + +- [feature] The html_error_template() will now + apply Pygments highlighting to the source + code displayed in the traceback, if Pygments + if available. Courtesy Ben Trofatter + [ticket:95] + +- [feature] Added support for context managers, + i.e. "% with x as e:/ % endwith" support. + Courtesy Ben Trofatter [ticket:147] + +- [feature] Added class-level flag to CacheImpl + "pass_context"; when True, the keyword argument + 'context' will be passed to get_or_create() + containing the Mako Context object. + [ticket:185] + +- [bug] Fixed some Py3K resource warnings due + to filehandles being implicitly closed. + [ticket:182] + +- [bug] Fixed endless recursion bug when + nesting multiple def-calls with content. + Thanks to Jeff Dairiki. [ticket:186] + +- [feature] Added Jinja2 to the example + benchmark suite, courtesy Vincent F??rotin + +0.6.2 +- [bug] The ${{"foo":"bar"}} parsing issue is fixed!! + The legendary Eevee has slain the dragon! + [ticket:20]. Also fixes quoting issue + at [ticket:86]. + +0.6.1 +- [bug] Added special compatibility for the 0.5.0 + Cache() constructor, which was preventing file + version checks and not allowing Mako 0.6 to + recompile the module files. + +0.6.0 + +- [feature] Template caching has been converted into a plugin + system, whereby the usage of Beaker is just the + default plugin. Template and TemplateLookup + now accept a string "cache_impl" parameter which + refers to the name of a cache plugin, defaulting + to the name 'beaker'. New plugins can be + registered as pkg_resources entrypoints under + the group "mako.cache", or registered directly + using mako.cache.register_plugin(). The + core plugin is the mako.cache.CacheImpl + class. + +- [feature] Added support for Beaker cache regions + in templates. Usage of regions should be considered + as superseding the very obsolete idea of passing in + backend options, timeouts, etc. within templates. + +- [feature] The 'put' method on Cache is now + 'set'. 'put' is there for backwards compatibility. + +- [feature] The <%def>, <%block> and <%page> tags now accept + any argument named "cache_*", and the key + minus the "cache_" prefix will be passed as keyword + arguments to the CacheImpl methods. + +- [feature] Template and TemplateLookup now accept an argument + cache_args, which refers to a dictionary containing + cache parameters. The cache_dir, cache_url, cache_type, + cache_timeout arguments are deprecated (will probably + never be removed, however) and can be passed + now as cache_args={'url':, 'type':'memcached', + 'timeout':50, 'dir':'/path/to/some/directory'} + +- [feature/bug] Can now refer to context variables + within extra arguments to <%block>, <%def>, i.e. + <%block name="foo" cache_key="${somekey}">. + Filters can also be used in this way, i.e. 
+ <%def name="foo()" filter="myfilter"> + then template.render(myfilter=some_callable) + [ticket:180] + +- [feature] Added "--var name=value" option to the mako-render + script, allows passing of kw to the template from + the command line. [ticket:178] + +- [feature] Added module_writer argument to Template, + TemplateLookup, allows a callable to be passed which + takes over the writing of the template's module source + file, so that special environment-specific steps + can be taken. [ticket:181] + +- [bug] The exception message in the html_error_template + is now escaped with the HTML filter. [ticket:142] + +- [bug] Added "white-space:pre" style to html_error_template() + for code blocks so that indentation is preserved + [ticket:173] + +- [bug] The "benchmark" example is now Python 3 compatible + (even though several of those old template libs aren't + available on Py3K, so YMMV) [ticket:175] + +0.5 +- A Template is explicitly disallowed + from having a url that normalizes to relative outside + of the root. That is, if the Lookup is based + at /home/mytemplates, an include that would place + the ultimate template at + /home/mytemplates/../some_other_directory, + i.e. outside of /home/mytemplates, + is disallowed. This usage was never intended + despite the lack of an explicit check. + The main issue this causes + is that module files can be written outside + of the module root (or raise an error, if file perms aren't + set up), and can also lead to the same template being + cached in the lookup under multiple, relative roots. + TemplateLookup instead has always supported multiple + file roots for this purpose. + [ticket:174] + +0.4.2 +- Fixed bug regarding <%call>/def calls w/ content + whereby the identity of the "caller" callable + inside the <%def> would be corrupted by the + presence of another <%call> in the same block. + [ticket:170] + +- Fixed the babel plugin to accommodate <%block> + [ticket:169] + +0.4.1 +- New tag: <%block>. A variant on <%def> that + evaluates its contents in-place. + Can be named or anonymous, + the named version is intended for inheritance + layouts where any given section can be + surrounded by the <%block> tag in order for + it to become overrideable by inheriting + templates, without the need to specify a + top-level <%def> plus explicit call. + Modified scoping and argument rules as well as a + more strictly enforced usage scheme make it ideal + for this purpose without at all replacing most + other things that defs are still good for. + Lots of new docs. [ticket:164] + +- a slight adjustment to the "highlight" logic + for generating template bound stacktraces. + Will stick to known template source lines + without any extra guessing. [ticket:165] + +0.4.0 +- A 20% speedup for a basic two-page + inheritance setup rendering + a table of escaped data + (see http://techspot.zzzeek.org/2010/11/19/quick-mako-vs.-jinja-speed-test/). + A few configurational changes which + affect those in the I-don't-do-unicode + camp should be noted below. + +- The FastEncodingBuffer is now used + by default instead of cStringIO or StringIO, + regardless of whether output_encoding + is set to None or not. FEB is faster than + both. Only StringIO allows bytestrings + of unknown encoding to pass right + through, however - while it is of course + not recommended to send bytestrings of unknown + encoding to the output stream, this + mode of usage can be re-enabled by + setting the flag bytestring_passthrough + to True. 
+ +- disable_unicode mode requires that + output_encoding be set to None - it also + forces the bytestring_passthrough flag + to True. + +- the <%namespace> tag raises an error + if the 'template' and 'module' attributes + are specified at the same time in + one tag. A different class is used + for each case which allows a reduction in + runtime conditional logic and function + call overhead. [ticket:156] + +- the keys() in the Context, as well as + it's internal _data dictionary, now + include just what was specified to + render() as well as Mako builtins + 'caller', 'capture'. The contents + of __builtin__ are no longer copied. + Thanks to Daniel Lopez for pointing + this out. [ticket:159] + +0.3.6 +- Documentation is on Sphinx. + [ticket:126] + +- Beaker is now part of "extras" in + setup.py instead of "install_requires". + This to produce a lighter weight install + for those who don't use the caching + as well as to conform to Pyramid + deployment practices. [ticket:154] + +- The Beaker import (or attempt thereof) + is delayed until actually needed; + this to remove the performance penalty + from startup, particularly for + "single execution" environments + such as shell scripts. [ticket:153] + +- Patch to lexer to not generate an empty + '' write in the case of backslash-ended + lines. [ticket:155] + +- Fixed missing **extra collection in + setup.py which prevented setup.py + from running 2to3 on install. + [ticket:148] + +- New flag on Template, TemplateLookup - + strict_undefined=True, will cause + variables not found in the context to + raise a NameError immediately, instead of + defaulting to the UNDEFINED value. + +- The range of Python identifiers that + are considered "undefined", meaning they + are pulled from the context, has been + trimmed back to not include variables + declared inside of expressions (i.e. from + list comprehensions), as well as + in the argument list of lambdas. This + to better support the strict_undefined + feature. The change should be + fully backwards-compatible but involved + a little bit of tinkering in the AST code, + which hadn't really been touched for + a couple of years, just FYI. + +0.3.5 +- The <%namespace> tag allows expressions + for the `file` argument, i.e. with ${}. + The `context` variable, if needed, + must be referenced explicitly. + [ticket:141] + +- ${} expressions embedded in tags, + such as <%foo:bar x="${...}">, now + allow multiline Python expressions. + +- Fixed previously non-covered regular + expression, such that using a ${} expression + inside of a tag element that doesn't allow + them raises a CompileException instead of + silently failing. + +- Added a try/except around "import markupsafe". + This to support GAE which can't run markupsafe. + [ticket:151] No idea whatsoever if the + install_requires in setup.py also breaks GAE, + couldn't get an answer on this. + +0.3.4 +- Now using MarkupSafe for HTML escaping, + i.e. in place of cgi.escape(). Faster + C-based implementation and also escapes + single quotes for additional security. + Supports the __html__ attribute for + the given expression as well. + + When using "disable_unicode" mode, + a pure Python HTML escaper function + is used which also quotes single quotes. + + Note that Pylons by default doesn't + use Mako's filter - check your + environment.py file. + +- Fixed call to "unicode.strip" in + exceptions.text_error_template which + is not Py3k compatible. 
[ticket:137] + +0.3.3 +- Added conditional to RichTraceback + such that if no traceback is passed + and sys.exc_info() has been reset, + the formatter just returns blank + for the "traceback" portion. + [ticket:135] + +- Fixed sometimes incorrect usage of + exc.__class__.__name__ + in html/text error templates when using + Python 2.4 [ticket:131] + +- Fixed broken @property decorator on + template.last_modified + +- Fixed error formatting when a stacktrace + line contains no line number, as in when + inside an eval/exec-generated function. + [ticket:132] + +- When a .py is being created, the tempfile + where the source is stored temporarily is + now made in the same directory as that of + the .py file. This ensures that the two + files share the same filesystem, thus + avoiding cross-filesystem synchronization + issues. Thanks to Charles Cazabon. + +0.3.2 +- Calling a def from the top, via + template.get_def(...).render() now checks the + argument signature the same way as it did in + 0.2.5, so that TypeError is not raised. + reopen of [ticket:116] + + +0.3.1 +- Fixed incorrect dir name in setup.py + [ticket:129] + +0.3 +- Python 2.3 support is dropped. [ticket:123] + +- Python 3 support is added ! See README.py3k + for installation and testing notes. + [ticket:119] + +- Unit tests now run with nose. [ticket:127] + +- Source code escaping has been simplified. + In particular, module source files are now + generated with the Python "magic encoding + comment", and source code is passed through + mostly unescaped, except for that code which + is regenerated from parsed Python source. + This fixes usage of unicode in + <%namespace:defname> tags. [ticket:99] + +- RichTraceback(), html_error_template().render(), + text_error_template().render() now accept "error" + and "traceback" as optional arguments, and + these are now actually used. [ticket:122] + +- The exception output generated when + format_exceptions=True will now be as a Python + unicode if it occurred during render_unicode(), + or an encoded string if during render(). + +- A percent sign can be emitted as the first + non-whitespace character on a line by escaping + it as in "%%". [ticket:112] + +- Template accepts empty control structure, i.e. + % if: %endif, etc. [ticket:94] + +- The <%page args> tag can now be used in a base + inheriting template - the full set of render() + arguments are passed down through the inherits + chain. Undeclared arguments go into **pageargs + as usual. [ticket:116] + +- defs declared within a <%namespace> section, an + uncommon feature, have been improved. The defs + no longer get doubly-rendered in the body() scope, + and now allow local variable assignment without + breakage. [ticket:109] + +- Windows paths are handled correctly if a Template + is passed only an absolute filename (i.e. with c: + drive etc.) and no URI - the URI is converted + to a forward-slash path and module_directory + is treated as a windows path. [ticket:128] + +- TemplateLookup raises TopLevelLookupException for + a given path that is a directory, not a filename, + instead of passing through to the template to + generate IOError. [ticket:73] + +0.2.6 + +- Fix mako function decorators to preserve the + original function's name in all cases. Patch + from Scott Torborg. + +- Support the <%namespacename:defname> syntax in + the babel extractor. [ticket:118] + +- Further fixes to unicode handling of .py files with the + html_error_template. 
[ticket:88] + +0.2.5 +- Added a "decorator" kw argument to <%def>, + allows custom decoration functions to wrap + rendering callables. Mainly intended for + custom caching algorithms, not sure what + other uses there may be (but there may be). + Examples are in the "filtering" docs. + +- When Mako creates subdirectories in which + to store templates, it uses the more + permissive mode of 0775 instead of 0750, + helping out with certain multi-process + scenarios. Note that the mode is always + subject to the restrictions of the existing + umask. [ticket:101] + +- Fixed namespace.__getattr__() to raise + AttributeError on attribute not found + instead of RuntimeError. [ticket:104] + +- Added last_modified accessor to Template, + returns the time.time() when the module + was created. [ticket:97] + +- Fixed lexing support for whitespace + around '=' sign in defs. [ticket:102] + +- Removed errant "lower()" in the lexer which + was causing tags to compile with + case-insensitive names, thus messing up + custom <%call> names. [ticket:108] + +- added "mako.__version__" attribute to + the base module. [ticket:110] + +0.2.4 +- Fixed compatibility with Jython 2.5b1. + +0.2.3 +- the <%namespacename:defname> syntax described at + http://techspot.zzzeek.org/?p=28 has now + been added as a built in syntax, and is recommended + as a more modern syntax versus <%call expr="expression">. + The %call tag itself will always remain, + with <%namespacename:defname> presenting a more HTML-like + alternative to calling defs, both plain and + nested. Many examples of the new syntax are in the + "Calling a def with embedded content" section + of the docs. + +- added support for Jython 2.5. + +- cache module now uses Beaker's CacheManager + object directly, so that all cache types are included. + memcached is available as both "ext:memcached" and + "memcached", the latter for backwards compatibility. + +- added "cache" accessor to Template, Namespace. + e.g. ${local.cache.get('somekey')} or + template.cache.invalidate_body() + +- added "cache_enabled=True" flag to Template, + TemplateLookup. Setting this to False causes cache + operations to "pass through" and execute every time; + this flag should be integrated in Pylons with its own + cache_enabled configuration setting. + +- the Cache object now supports invalidate_def(name), + invalidate_body(), invalidate_closure(name), + invalidate(key), which will remove the given key + from the cache, if it exists. The cache arguments + (i.e. storage type) are derived from whatever has + been already persisted for that template. + [ticket:92] + +- For cache changes to work fully, Beaker 1.1 is required. + 1.0.1 and up will work as well with the exception of + cache expiry. Note that Beaker 1.1 is **required** + for applications which use dynamically generated keys, + since previous versions will permanently store state in memory + for each individual key, thus consuming all available + memory for an arbitrarily large number of distinct + keys. + +- fixed bug whereby an <%included> template with + <%page> args named the same as a __builtin__ would not + honor the default value specified in <%page> [ticket:93] + +- fixed the html_error_template not handling tracebacks from + normal .py files with a magic encoding comment [ticket:88] + +- RichTraceback() now accepts an optional traceback object + to be used in place of sys.exc_info()[2]. 
html_error_template() + and text_error_template() accept an optional + render()-time argument "traceback" which is passed to the + RichTraceback object. + +- added ModuleTemplate class, which allows the construction + of a Template given a Python module generated by a previous + Template. This allows Python modules alone to be used + as templates with no compilation step. Source code + and template source are optional but allow error reporting + to work correctly. + +- fixed Python 2.3 compat. in mako.pyparser [ticket:90] + +- fix Babel 0.9.3 compatibility; stripping comment tags is now + optional (and enabled by default). + + +0.2.2 +- cached blocks now use the current context when rendering +an expired section, instead of the original context +passed in [ticket:87] +- fixed a critical issue regarding caching, whereby +a cached block would raise an error when called within a +cache-refresh operation that was initiated after the +initiating template had completed rendering. + +0.2.1 +- fixed bug where 'output_encoding' parameter would prevent +render_unicode() from returning a unicode object. +- bumped magic number, which forces template recompile for +this version (fixes incompatible compile symbols from 0.1 +series). +- added a few docs for cache options, specifically those that +help with memcached. + +0.2.0 +- Speed improvements (as though we needed them, but people + contributed and there you go): + + - added "bytestring passthru" mode, via + `disable_unicode=True` argument passed to Template or + TemplateLookup. All unicode-awareness and filtering is + turned off, and template modules are generated with + the appropriate magic encoding comment. In this mode, + template expressions can only receive raw bytestrings + or Unicode objects which represent straight ASCII, and + render_unicode() may not be used if multibyte + characters are present. When enabled, speed + improvement around 10-20%. [ticket:77] (courtesy + anonymous guest) + + - inlined the "write" function of Context into a local + template variable. This affords a 12-30% speedup in + template render time. (idea courtesy same anonymous + guest) [ticket:76] + +- New Features, API changes: + + - added "attr" accessor to namespaces. Returns + attributes configured as module level attributes, i.e. + within <%! %> sections. [ticket:62] i.e.: + + # somefile.html + <%! + foo = 27 + %> + + # some other template + <%namespace name="myns" file="somefile.html"/> + ${myns.attr.foo} + + The slight backwards incompatibility here is, you + can't have namespace defs named "attr" since the + "attr" descriptor will occlude it. + + - cache_key argument can now render arguments passed + directly to the %page or %def, i.e. <%def + name="foo(x)" cached="True" cache_key="${x}"/> + [ticket:78] + + - some functions on Context are now private: + _push_buffer(), _pop_buffer(), + caller_stack._push_frame(), caller_stack._pop_frame(). + + - added a runner script "mako-render" which renders + standard input as a template to stdout [ticket:81] + [ticket:56] + +- Bugfixes: + - can now use most names from __builtins__ as variable + names without explicit declaration (i.e. 'id', + 'exception', 'range', etc.) [ticket:83] [ticket:84] + + - can also use builtin names as local variable names + (i.e. dict, locals) (came from fix for [ticket:84]) + + - fixed bug in python generation when variable names are + used with identifiers like "else", "finally", etc. 
+ inside them [ticket:68] + + - fixed codegen bug which occured when using <%page> + level caching, combined with an expression-based + cache_key, combined with the usage of <%namespace + import="*"/> - fixed lexer exceptions not cleaning up + temporary files, which could lead to a maximum number + of file descriptors used in the process [ticket:69] + + - fixed issue with inline format_exceptions that was + producing blank exception pages when an inheriting + template is present [ticket:71] + + - format_exceptions will apply the encoding options of + html_error_template() to the buffered output + + - rewrote the "whitespace adjuster" function to work + with more elaborate combinations of quotes and + comments [ticket:75] + +0.1.10 +- fixed propagation of 'caller' such that nested %def calls + within a <%call> tag's argument list propigates 'caller' + to the %call function itself (propigates to the inner + calls too, this is a slight side effect which previously + existed anyway) +- fixed bug where local.get_namespace() could put an + incorrect "self" in the current context +- fixed another namespace bug where the namespace functions + did not have access to the correct context containing + their 'self' and 'parent' + +0.1.9 +- filters.Decode filter can also accept a non-basestring +object and will call str() + unicode() on it [ticket:47] +- comments can be placed at the end of control lines, +i.e. if foo: # a comment, [ticket:53], thanks to +Paul Colomiets +- fixed expressions and page tag arguments and with embedded +newlines in CRLF templates, follow up to [ticket:16], thanks +Eric Woroshow +- added an IOError catch for source file not found in RichTraceback +exception reporter [ticket:51] + +0.1.8 +- variable names declared in render methods by internal +codegen prefixed by "__M_" to prevent name collisions +with user code +- added a Babel (http://babel.edgewall.org/) extractor entry +point, allowing extraction of gettext messages directly from +mako templates via Babel [ticket:45] +- fix to turbogears plugin to work with dot-separated names +(i.e. load_template('foo.bar')). also takes file extension +as a keyword argument (default is 'mak'). +- more tg fix: fixed [ticket:35], allowing string-based +templates with tgplugin even if non-compatible args were sent + +0.1.7 +- one small fix to the unit tests to support python 2.3 +- a slight hack to how cache.py detects Beaker's memcached, +works around unexplained import behavior observed on some +python 2.3 installations + +0.1.6 +- caching is now supplied directly by Beaker, which has + all of MyghtyUtils merged into it now. The latest Beaker + (0.7.1) also fixes a bug related to how Mako was using the + cache API. +- fix to module_directory path generation when the path is "./" + [ticket:34] +- TGPlugin passes options to string-based templates [ticket:35] +- added an explicit stack frame step to template runtime, which + allows much simpler and hopefully bug-free tracking of 'caller', + fixes #28 +- if plain Python defs are used with <%call>, a decorator + @runtime.supports_callable exists to ensure that the "caller" + stack is properly handled for the def. 
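The 0.1.10 and 0.1.6 notes above both concern how 'caller' is made available to defs invoked through <%call>; a small self-contained sketch of that mechanism (the def name and body text are illustrative only):

    from mako.template import Template

    # box() is invoked via <%call>; inside it, ${caller.body()} renders the
    # content enclosed by the <%call> tag.
    tmpl = Template("""\
    <%def name="box()">[ ${caller.body()} ]</%def>
    <%call expr="box()">hello</%call>
    """)
    print(tmpl.render())
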
+- fix to RichTraceback and exception reporting to get template + source code as a unicode object #37 +- html_error_template includes options "full=True", "css=True" + which control generation of HTML tags, CSS [ticket:39] +- added the 'encoding_errors' parameter to Template/TemplateLookup + for specifying the error handler associated with encoding to + 'output_encoding' [ticket:40] +- the Template returned by html_error_template now defaults to + output_encoding=sys.getdefaultencoding(), + encoding_errors='htmlentityreplace' [ticket:37] +- control lines, i.e. % lines, support backslashes to continue long + lines (#32) +- fixed codegen bug when defining <%def> within <%call> within <%call> +- leading utf-8 BOM in template files is honored according to pep-0263 + +0.1.5 +- AST expression generation - added in just about everything + expression-wise from the AST module [ticket:26] +- AST parsing, properly detects imports of the form "import foo.bar" + [ticket:27] +- fix to lexing of <%docs> tag nested in other tags +- fix to context-arguments inside of <%include> tag which broke +during 0.1.4 [ticket:29] +- added "n" filter, disables *all* filters normally applied to an expression +via <%page> or default_filters (but not those within the filter) +- added buffer_filters argument, defines filters applied to the return value +of buffered/cached/filtered %defs, after all filters defined with the %def +itself have been applied. allows the creation of default expression filters +that let the output of return-valued %defs "opt out" of that filtering +via passing special attributes or objects. + +0.1.4 +- got defs-within-defs to be cacheable +- fixes to code parsing/whitespace adjusting where plain python comments + may contain quote characters [ticket:23] +- fix to variable scoping for identifiers only referenced within + functions +- added a path normalization step to lookup so URIs like + "/foo/bar/../etc/../foo" pre-process the ".." tokens before checking + the filesystem +- fixed/improved "caller" semantics so that undefined caller is + "UNDEFINED", propigates __nonzero__ method so it evaulates to False if + not present, True otherwise. this way you can say % if caller:\n + ${caller.body()}\n% endif +- <%include> has an "args" attribute that can pass arguments to the + called template (keyword arguments only, must be declared in that + page's <%page> tag.) +- <%include> plus arguments is also programmatically available via + self.include_file(, **kwargs) +- further escaping added for multibyte expressions in %def, %call + attributes [ticket:24] + + +0.1.3 +- ***Small Syntax Change*** - the single line comment character is now +*two* hash signs, i.e. "## this is a comment". This avoids a common +collection with CSS selectors. +- the magic "coding" comment (i.e. # coding:utf-8) will still work with +either one "#" sign or two for now; two is preferred going forward, i.e. +## coding:. +- new multiline comment form: "<%doc> a comment " +- UNDEFINED evaluates to False +- improvement to scoping of "caller" variable when using <%call> tag +- added lexer error for unclosed control-line (%) line +- added "preprocessor" argument to Template, TemplateLookup - is a single + callable or list of callables which will be applied to the template text + before lexing. given the text as an argument, returns the new text. 
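To illustrate the "preprocessor" argument described in the item just above, a minimal sketch (the upper-casing rule is made up for the example):

    from mako.template import Template

    def upper_hello(text):
        # Receives the raw template text before lexing and returns new text,
        # exactly as the entry above describes.
        return text.replace("hello", "HELLO")

    tmpl = Template("hello, ${name}", preprocessor=upper_hello)
    print(tmpl.render(name="world"))   # -> HELLO, world
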
+- added mako.ext.preprocessors package, contains one preprocessor so far: + 'convert_comments', which will convert single # comments to the new ## + format + +0.1.2 +- fix to parsing of code/expression blocks to insure that non-ascii + characters, combined with a template that indicates a non-standard + encoding, are expanded into backslash-escaped glyphs before being AST + parsed [ticket:11] +- all template lexing converts the template to unicode first, to + immediately catch any encoding issues and ensure internal unicode + representation. +- added module_filename argument to Template to allow specification of a + specific module file +- added modulename_callable to TemplateLookup to allow a function to + determine module filenames (takes filename, uri arguments). used for + [ticket:14] +- added optional input_encoding flag to Template, to allow sending a + unicode() object with no magic encoding comment +- "expression_filter" argument in <%page> applies only to expressions +- added "default_filters" argument to Template, TemplateLookup. applies only + to expressions, gets prepended to "expression_filter" arg from <%page>. + defaults to ["unicode"], so that all expressions get stringified into u'' + by default (this is what Mako already does). By setting to [], expressions + are passed through raw. +- added "imports" argument to Template, TemplateLookup. so you can predefine + a list of import statements at the top of the template. can be used in + conjunction with default_filters. +- support for CRLF templates...whoops ! welcome to all the windows users. + [ticket:16] +- small fix to local variable propigation for locals that are conditionally + declared +- got "top level" def calls to work, i.e. template.get_def("somedef").render() + +0.1.1 +- buffet plugin supports string-based templates, allows ToscaWidgets to work + [ticket:8] +- AST parsing fixes: fixed TryExcept identifier parsing +- removed textmate tmbundle from contrib and into separate SVN location; + windows users cant handle those files, setuptools not very good at + "pruning" certain directories +- fix so that "cache_timeout" parameter is propigated +- fix to expression filters so that string conversion (actually unicode) + properly occurs before filtering +- better error message when a lookup is attempted with a template that has no + lookup +- implemented "module" attribute for namespace +- fix to code generation to correctly track multiple defs with the same name +- "directories" can be passed to TemplateLookup as a scalar in which case it + gets converted to a list [ticket:9] + +0.1.0 + +Initial release. diff --git a/lib3/Mako-0.7.3/LICENSE b/lib3/Mako-0.7.3/LICENSE new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/LICENSE @@ -0,0 +1,20 @@ +This is the MIT license: http://www.opensource.org/licenses/mit-license.php + +Copyright (C) 2006-2012 the Mako authors and contributors . +Mako is a trademark of Michael Bayer. + +Permission is hereby granted, free of charge, to any person obtaining a copy of this +software and associated documentation files (the "Software"), to deal in the Software +without restriction, including without limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons +to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/lib3/Mako-0.7.3/MANIFEST.in b/lib3/Mako-0.7.3/MANIFEST.in new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/MANIFEST.in @@ -0,0 +1,11 @@ +# any kind of "*" pulls in __init__.pyc files, +# so all extensions are explicit. + +recursive-include doc *.html *.css *.txt *.js *.png *.py Makefile *.rst *.mako +recursive-include examples *.py *.xml *.mako *.myt *.kid *.tmpl +recursive-include test *.py *.html *.mako + +include README* LICENSE distribute_setup.py ez_setup.py CHANGES* + +prune doc/build/output + diff --git a/lib3/Mako-0.7.3/Mako.egg-info/PKG-INFO b/lib3/Mako-0.7.3/Mako.egg-info/PKG-INFO new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/Mako.egg-info/PKG-INFO @@ -0,0 +1,71 @@ +Metadata-Version: 1.0 +Name: Mako +Version: 0.7.3 +Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. +Home-page: http://www.makotemplates.org/ +Author: Mike Bayer +Author-email: mike at zzzcomputing.com +License: MIT +Description: ========================= + Mako Templates for Python + ========================= + + Mako is a template library written in Python. It provides a familiar, non-XML + syntax which compiles into Python modules for maximum performance. Mako's + syntax and API borrows from the best ideas of many others, including Django + templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded + Python (i.e. Python Server Page) language, which refines the familiar ideas + of componentized layout and inheritance to produce one of the most + straightforward and flexible models available, while also maintaining close + ties to Python calling and scoping semantics. + + Nutshell + ======== + + :: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
+ + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + + Philosophy + =========== + + Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + + Documentation + ============== + + See documentation for Mako at http://www.makotemplates.org/docs/ + + License + ======== + + Mako is licensed under an MIT-style license (see LICENSE). + Other incorporated projects may be licensed under different licenses. + All licenses allow for non-commercial and commercial use. + +Keywords: templates +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content diff --git a/lib3/Mako-0.7.3/Mako.egg-info/SOURCES.txt b/lib3/Mako-0.7.3/Mako.egg-info/SOURCES.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/Mako.egg-info/SOURCES.txt @@ -0,0 +1,171 @@ +CHANGES +LICENSE +MANIFEST.in +README.rst +distribute_setup.py +setup.cfg +setup.py +Mako.egg-info/PKG-INFO +Mako.egg-info/SOURCES.txt +Mako.egg-info/dependency_links.txt +Mako.egg-info/entry_points.txt +Mako.egg-info/not-zip-safe +Mako.egg-info/requires.txt +Mako.egg-info/top_level.txt +doc/caching.html +doc/defs.html +doc/filtering.html +doc/genindex.html +doc/index.html +doc/inheritance.html +doc/namespaces.html +doc/runtime.html +doc/search.html +doc/searchindex.js +doc/syntax.html +doc/unicode.html +doc/usage.html +doc/_sources/caching.txt +doc/_sources/defs.txt +doc/_sources/filtering.txt +doc/_sources/index.txt +doc/_sources/inheritance.txt +doc/_sources/namespaces.txt +doc/_sources/runtime.txt +doc/_sources/syntax.txt +doc/_sources/unicode.txt +doc/_sources/usage.txt +doc/_static/basic.css +doc/_static/comment-bright.png +doc/_static/comment-close.png +doc/_static/comment.png +doc/_static/default.css +doc/_static/docs.css +doc/_static/doctools.js +doc/_static/down-pressed.png +doc/_static/down.png +doc/_static/file.png +doc/_static/jquery.js +doc/_static/makoLogo.png +doc/_static/minus.png +doc/_static/plus.png +doc/_static/pygments.css +doc/_static/searchtools.js +doc/_static/sidebar.js +doc/_static/site.css +doc/_static/underscore.js +doc/_static/up-pressed.png +doc/_static/up.png +doc/_static/websupport.js +doc/build/Makefile +doc/build/caching.rst +doc/build/conf.py +doc/build/defs.rst +doc/build/filtering.rst +doc/build/index.rst +doc/build/inheritance.rst +doc/build/namespaces.rst +doc/build/runtime.rst +doc/build/syntax.rst +doc/build/unicode.rst +doc/build/usage.rst +doc/build/builder/__init__.py +doc/build/builder/builders.py +doc/build/builder/util.py +doc/build/static/docs.css +doc/build/static/makoLogo.png +doc/build/static/site.css +doc/build/templates/base.mako +doc/build/templates/genindex.mako +doc/build/templates/layout.mako +doc/build/templates/page.mako +doc/build/templates/rtd_layout.mako +doc/build/templates/search.mako +examples/bench/basic.py +examples/bench/cheetah/footer.tmpl +examples/bench/cheetah/header.tmpl +examples/bench/cheetah/template.tmpl +examples/bench/django/templatetags/__init__.py +examples/bench/django/templatetags/bench.py +examples/bench/kid/base.kid +examples/bench/kid/template.kid +examples/bench/myghty/base.myt 
+examples/bench/myghty/template.myt +examples/wsgi/run_wsgi.py +mako/__init__.py +mako/_ast_util.py +mako/ast.py +mako/cache.py +mako/codegen.py +mako/exceptions.py +mako/filters.py +mako/lexer.py +mako/lookup.py +mako/parsetree.py +mako/pygen.py +mako/pyparser.py +mako/runtime.py +mako/template.py +mako/util.py +mako/ext/__init__.py +mako/ext/autohandler.py +mako/ext/babelplugin.py +mako/ext/beaker_cache.py +mako/ext/preprocessors.py +mako/ext/pygmentplugin.py +mako/ext/turbogears.py +scripts/mako-render +test/__init__.py +test/sample_module_namespace.py +test/test_ast.py +test/test_babelplugin.py +test/test_block.py +test/test_cache.py +test/test_call.py +test/test_decorators.py +test/test_def.py +test/test_exceptions.py +test/test_filters.py +test/test_inheritance.py +test/test_lexer.py +test/test_lookup.py +test/test_loop.py +test/test_lru.py +test/test_namespace.py +test/test_pygen.py +test/test_template.py +test/test_tgplugin.py +test/test_util.py +test/util.py +test/foo/__init__.py +test/foo/test_ns.py +test/templates/badbom.html +test/templates/bom.html +test/templates/bommagic.html +test/templates/chs_unicode.html +test/templates/chs_unicode_py3k.html +test/templates/chs_utf8.html +test/templates/crlf.html +test/templates/gettext.mako +test/templates/index.html +test/templates/internationalization.html +test/templates/modtest.html +test/templates/read_unicode.html +test/templates/read_unicode_py3k.html +test/templates/runtimeerr.html +test/templates/runtimeerr_py3k.html +test/templates/unicode.html +test/templates/unicode_arguments.html +test/templates/unicode_arguments_py3k.html +test/templates/unicode_code.html +test/templates/unicode_code_py3k.html +test/templates/unicode_expr.html +test/templates/unicode_expr_py3k.html +test/templates/unicode_runtime_error.html +test/templates/unicode_syntax_error.html +test/templates/foo/modtest.html.py +test/templates/othersubdir/foo.html +test/templates/subdir/incl.html +test/templates/subdir/index.html +test/templates/subdir/modtest.html +test/templates/subdir/foo/modtest.html.py \ No newline at end of file diff --git a/lib3/Mako-0.7.3/Mako.egg-info/dependency_links.txt b/lib3/Mako-0.7.3/Mako.egg-info/dependency_links.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/Mako.egg-info/dependency_links.txt @@ -0,0 +1,1 @@ + diff --git a/lib3/Mako-0.7.3/Mako.egg-info/entry_points.txt b/lib3/Mako-0.7.3/Mako.egg-info/entry_points.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/Mako.egg-info/entry_points.txt @@ -0,0 +1,14 @@ + + [python.templating.engines] + mako = mako.ext.turbogears:TGPlugin + + [pygments.lexers] + mako = mako.ext.pygmentplugin:MakoLexer + html+mako = mako.ext.pygmentplugin:MakoHtmlLexer + xml+mako = mako.ext.pygmentplugin:MakoXmlLexer + js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer + css+mako = mako.ext.pygmentplugin:MakoCssLexer + + [babel.extractors] + mako = mako.ext.babelplugin:extract + \ No newline at end of file diff --git a/lib3/Mako-0.7.3/Mako.egg-info/not-zip-safe b/lib3/Mako-0.7.3/Mako.egg-info/not-zip-safe new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/Mako.egg-info/not-zip-safe @@ -0,0 +1,1 @@ + diff --git a/lib3/Mako-0.7.3/Mako.egg-info/requires.txt b/lib3/Mako-0.7.3/Mako.egg-info/requires.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/Mako.egg-info/requires.txt @@ -0,0 +1,4 @@ +MarkupSafe>=0.9.2 + +[beaker] +Beaker>=1.1 \ No newline at end of file diff --git a/lib3/Mako-0.7.3/Mako.egg-info/top_level.txt b/lib3/Mako-0.7.3/Mako.egg-info/top_level.txt new file 
mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/Mako.egg-info/top_level.txt @@ -0,0 +1,1 @@ +mako diff --git a/lib3/Mako-0.7.3/PKG-INFO b/lib3/Mako-0.7.3/PKG-INFO new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/PKG-INFO @@ -0,0 +1,71 @@ +Metadata-Version: 1.0 +Name: Mako +Version: 0.7.3 +Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. +Home-page: http://www.makotemplates.org/ +Author: Mike Bayer +Author-email: mike at zzzcomputing.com +License: MIT +Description: ========================= + Mako Templates for Python + ========================= + + Mako is a template library written in Python. It provides a familiar, non-XML + syntax which compiles into Python modules for maximum performance. Mako's + syntax and API borrows from the best ideas of many others, including Django + templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded + Python (i.e. Python Server Page) language, which refines the familiar ideas + of componentized layout and inheritance to produce one of the most + straightforward and flexible models available, while also maintaining close + ties to Python calling and scoping semantics. + + Nutshell + ======== + + :: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
+ + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + + Philosophy + =========== + + Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + + Documentation + ============== + + See documentation for Mako at http://www.makotemplates.org/docs/ + + License + ======== + + Mako is licensed under an MIT-style license (see LICENSE). + Other incorporated projects may be licensed under different licenses. + All licenses allow for non-commercial and commercial use. + +Keywords: templates +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content diff --git a/lib3/Mako-0.7.3/README.rst b/lib3/Mako-0.7.3/README.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/README.rst @@ -0,0 +1,52 @@ +========================= +Mako Templates for Python +========================= + +Mako is a template library written in Python. It provides a familiar, non-XML +syntax which compiles into Python modules for maximum performance. Mako's +syntax and API borrows from the best ideas of many others, including Django +templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded +Python (i.e. Python Server Page) language, which refines the familiar ideas +of componentized layout and inheritance to produce one of the most +straightforward and flexible models available, while also maintaining close +ties to Python calling and scoping semantics. + +Nutshell +======== + +:: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
+ + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + +Philosophy +=========== + +Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + +Documentation +============== + +See documentation for Mako at http://www.makotemplates.org/docs/ + +License +======== + +Mako is licensed under an MIT-style license (see LICENSE). +Other incorporated projects may be licensed under different licenses. +All licenses allow for non-commercial and commercial use. diff --git a/lib3/Mako-0.7.3/distribute_setup.py b/lib3/Mako-0.7.3/distribute_setup.py new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/distribute_setup.py @@ -0,0 +1,485 @@ +#!python +"""Bootstrap distribute installation + +If you want to use setuptools in your package's setup.py, just include this +file in the same directory with it, and add this to the top of your setup.py:: + + from distribute_setup import use_setuptools + use_setuptools() + +If you want to require a specific version of setuptools, set a download +mirror, or use an alternate download directory, you can do so by supplying +the appropriate options to ``use_setuptools()``. + +This file can also be run as a script to install or upgrade setuptools. +""" +import os +import sys +import time +import fnmatch +import tempfile +import tarfile +from distutils import log + +try: + from site import USER_SITE +except ImportError: + USER_SITE = None + +try: + import subprocess + + def _python_cmd(*args): + args = (sys.executable,) + args + return subprocess.call(args) == 0 + +except ImportError: + # will be used for python 2.3 + def _python_cmd(*args): + args = (sys.executable,) + args + # quoting arguments if windows + if sys.platform == 'win32': + def quote(arg): + if ' ' in arg: + return '"%s"' % arg + return arg + args = [quote(arg) for arg in args] + return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 + +DEFAULT_VERSION = "0.6.13" +DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/" +SETUPTOOLS_FAKED_VERSION = "0.6c11" + +SETUPTOOLS_PKG_INFO = """\ +Metadata-Version: 1.0 +Name: setuptools +Version: %s +Summary: xxxx +Home-page: xxx +Author: xxx +Author-email: xxx +License: xxx +Description: xxx +""" % SETUPTOOLS_FAKED_VERSION + + +def _install(tarball): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # installing + log.warn('Installing Distribute') + if not _python_cmd('setup.py', 'install'): + log.warn('Something went wrong during the installation.') + log.warn('See the error message above.') + finally: + os.chdir(old_wd) + + +def _build_egg(egg, tarball, to_dir): + # extracting the tarball + tmpdir = tempfile.mkdtemp() + log.warn('Extracting in %s', tmpdir) + old_wd = os.getcwd() + try: + os.chdir(tmpdir) + tar = tarfile.open(tarball) + _extractall(tar) + tar.close() + + # going in the directory + subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) + os.chdir(subdir) + log.warn('Now working in %s', subdir) + + # building an egg + log.warn('Building a Distribute egg in %s', to_dir) + _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) + + finally: + os.chdir(old_wd) + # returning the result + log.warn(egg) + if not os.path.exists(egg): + raise IOError('Could not build the 
egg.') + + +def _do_download(version, download_base, to_dir, download_delay): + egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' + % (version, sys.version_info[0], sys.version_info[1])) + if not os.path.exists(egg): + tarball = download_setuptools(version, download_base, + to_dir, download_delay) + _build_egg(egg, tarball, to_dir) + sys.path.insert(0, egg) + import setuptools + setuptools.bootstrap_install_from = egg + + +def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, + to_dir=os.curdir, download_delay=15, no_fake=True): + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + was_imported = 'pkg_resources' in sys.modules or \ + 'setuptools' in sys.modules + try: + try: + import pkg_resources + if not hasattr(pkg_resources, '_distribute'): + if not no_fake: + _fake_setuptools() + raise ImportError + except ImportError: + return _do_download(version, download_base, to_dir, download_delay) + try: + pkg_resources.require("distribute>="+version) + return + except pkg_resources.VersionConflict: + e = sys.exc_info()[1] + if was_imported: + sys.stderr.write( + "The required version of distribute (>=%s) is not available,\n" + "and can't be installed while this script is running. Please\n" + "install a more recent version first, using\n" + "'easy_install -U distribute'." + "\n\n(Currently using %r)\n" % (version, e.args[0])) + sys.exit(2) + else: + del pkg_resources, sys.modules['pkg_resources'] # reload ok + return _do_download(version, download_base, to_dir, + download_delay) + except pkg_resources.DistributionNotFound: + return _do_download(version, download_base, to_dir, + download_delay) + finally: + if not no_fake: + _create_fake_setuptools_pkg_info(to_dir) + +def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, + to_dir=os.curdir, delay=15): + """Download distribute from a specified location and return its filename + + `version` should be a valid distribute version number that is available + as an egg for download under the `download_base` URL (which should end + with a '/'). `to_dir` is the directory where the egg will be downloaded. + `delay` is the number of seconds to pause before an actual download + attempt. + """ + # making sure we use the absolute path + to_dir = os.path.abspath(to_dir) + try: + from urllib.request import urlopen + except ImportError: + from urllib.request import urlopen + tgz_name = "distribute-%s.tar.gz" % version + url = download_base + tgz_name + saveto = os.path.join(to_dir, tgz_name) + src = dst = None + if not os.path.exists(saveto): # Avoid repeated downloads + try: + log.warn("Downloading %s", url) + src = urlopen(url) + # Read/write all in one block, so we don't create a corrupt file + # if the download is interrupted. 
+ data = src.read() + dst = open(saveto, "wb") + dst.write(data) + finally: + if src: + src.close() + if dst: + dst.close() + return os.path.realpath(saveto) + +def _no_sandbox(function): + def __no_sandbox(*args, **kw): + try: + from setuptools.sandbox import DirectorySandbox + if not hasattr(DirectorySandbox, '_old'): + def violation(*args): + pass + DirectorySandbox._old = DirectorySandbox._violation + DirectorySandbox._violation = violation + patched = True + else: + patched = False + except ImportError: + patched = False + + try: + return function(*args, **kw) + finally: + if patched: + DirectorySandbox._violation = DirectorySandbox._old + del DirectorySandbox._old + + return __no_sandbox + +def _patch_file(path, content): + """Will backup the file then patch it""" + existing_content = open(path).read() + if existing_content == content: + # already patched + log.warn('Already patched.') + return False + log.warn('Patching...') + _rename_path(path) + f = open(path, 'w') + try: + f.write(content) + finally: + f.close() + return True + +_patch_file = _no_sandbox(_patch_file) + +def _same_content(path, content): + return open(path).read() == content + +def _rename_path(path): + new_name = path + '.OLD.%s' % time.time() + log.warn('Renaming %s into %s', path, new_name) + os.rename(path, new_name) + return new_name + +def _remove_flat_installation(placeholder): + if not os.path.isdir(placeholder): + log.warn('Unkown installation at %s', placeholder) + return False + found = False + for file in os.listdir(placeholder): + if fnmatch.fnmatch(file, 'setuptools*.egg-info'): + found = True + break + if not found: + log.warn('Could not locate setuptools*.egg-info') + return + + log.warn('Removing elements out of the way...') + pkg_info = os.path.join(placeholder, file) + if os.path.isdir(pkg_info): + patched = _patch_egg_dir(pkg_info) + else: + patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) + + if not patched: + log.warn('%s already patched.', pkg_info) + return False + # now let's move the files out of the way + for element in ('setuptools', 'pkg_resources.py', 'site.py'): + element = os.path.join(placeholder, element) + if os.path.exists(element): + _rename_path(element) + else: + log.warn('Could not find the %s element of the ' + 'Setuptools distribution', element) + return True + +_remove_flat_installation = _no_sandbox(_remove_flat_installation) + +def _after_install(dist): + log.warn('After install bootstrap.') + placeholder = dist.get_command_obj('install').install_purelib + _create_fake_setuptools_pkg_info(placeholder) + +def _create_fake_setuptools_pkg_info(placeholder): + if not placeholder or not os.path.exists(placeholder): + log.warn('Could not find the install location') + return + pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) + setuptools_file = 'setuptools-%s-py%s.egg-info' % \ + (SETUPTOOLS_FAKED_VERSION, pyver) + pkg_info = os.path.join(placeholder, setuptools_file) + if os.path.exists(pkg_info): + log.warn('%s already exists', pkg_info) + return + + log.warn('Creating %s', pkg_info) + f = open(pkg_info, 'w') + try: + f.write(SETUPTOOLS_PKG_INFO) + finally: + f.close() + + pth_file = os.path.join(placeholder, 'setuptools.pth') + log.warn('Creating %s', pth_file) + f = open(pth_file, 'w') + try: + f.write(os.path.join(os.curdir, setuptools_file)) + finally: + f.close() + +_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info) + +def _patch_egg_dir(path): + # let's check if it's already patched + pkg_info = os.path.join(path, 
'EGG-INFO', 'PKG-INFO') + if os.path.exists(pkg_info): + if _same_content(pkg_info, SETUPTOOLS_PKG_INFO): + log.warn('%s already patched.', pkg_info) + return False + _rename_path(path) + os.mkdir(path) + os.mkdir(os.path.join(path, 'EGG-INFO')) + pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') + f = open(pkg_info, 'w') + try: + f.write(SETUPTOOLS_PKG_INFO) + finally: + f.close() + return True + +_patch_egg_dir = _no_sandbox(_patch_egg_dir) + +def _before_install(): + log.warn('Before install bootstrap.') + _fake_setuptools() + + +def _under_prefix(location): + if 'install' not in sys.argv: + return True + args = sys.argv[sys.argv.index('install')+1:] + for index, arg in enumerate(args): + for option in ('--root', '--prefix'): + if arg.startswith('%s=' % option): + top_dir = arg.split('root=')[-1] + return location.startswith(top_dir) + elif arg == option: + if len(args) > index: + top_dir = args[index+1] + return location.startswith(top_dir) + if arg == '--user' and USER_SITE is not None: + return location.startswith(USER_SITE) + return True + + +def _fake_setuptools(): + log.warn('Scanning installed packages') + try: + import pkg_resources + except ImportError: + # we're cool + log.warn('Setuptools or Distribute does not seem to be installed.') + return + ws = pkg_resources.working_set + try: + setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools', + replacement=False)) + except TypeError: + # old distribute API + setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools')) + + if setuptools_dist is None: + log.warn('No setuptools distribution found') + return + # detecting if it was already faked + setuptools_location = setuptools_dist.location + log.warn('Setuptools installation detected at %s', setuptools_location) + + # if --root or --preix was provided, and if + # setuptools is not located in them, we don't patch it + if not _under_prefix(setuptools_location): + log.warn('Not patching, --root or --prefix is installing Distribute' + ' in another location') + return + + # let's see if its an egg + if not setuptools_location.endswith('.egg'): + log.warn('Non-egg installation') + res = _remove_flat_installation(setuptools_location) + if not res: + return + else: + log.warn('Egg installation') + pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO') + if (os.path.exists(pkg_info) and + _same_content(pkg_info, SETUPTOOLS_PKG_INFO)): + log.warn('Already patched.') + return + log.warn('Patching...') + # let's create a fake egg replacing setuptools one + res = _patch_egg_dir(setuptools_location) + if not res: + return + log.warn('Patched done.') + _relaunch() + + +def _relaunch(): + log.warn('Relaunching...') + # we have to relaunch the process + # pip marker to avoid a relaunch bug + if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']: + sys.argv[0] = 'setup.py' + args = [sys.executable] + sys.argv + sys.exit(subprocess.call(args)) + + +def _extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + import copy + import operator + from tarfile import ExtractError + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. 
+ directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 448 # decimal for oct 0700 + self.extract(tarinfo, path) + + # Reverse sort directories. + if sys.version_info < (2, 4): + def sorter(dir1, dir2): + return cmp(dir1.name, dir2.name) + directories.sort(sorter) + directories.reverse() + else: + directories.sort(key=operator.attrgetter('name'), reverse=True) + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError: + e = sys.exc_info()[1] + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + +def main(argv, version=DEFAULT_VERSION): + """Install or upgrade setuptools and EasyInstall""" + tarball = download_setuptools() + _install(tarball) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/lib3/Mako-0.7.3/doc/_sources/caching.txt b/lib3/Mako-0.7.3/doc/_sources/caching.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/caching.txt @@ -0,0 +1,393 @@ +.. _caching_toplevel: + +======= +Caching +======= + +Any template or component can be cached using the ``cache`` +argument to the ``<%page>``, ``<%def>`` or ``<%block>`` directives: + +.. sourcecode:: mako + + <%page cached="True"/> + + template text + +The above template, after being executed the first time, will +store its content within a cache that by default is scoped +within memory. Subsequent calls to the template's :meth:`~.Template.render` +method will return content directly from the cache. When the +:class:`.Template` object itself falls out of scope, its corresponding +cache is garbage collected along with the template. + +By default, caching requires that the `Beaker `_ package be installed on the +system, however the mechanism of caching can be customized to use +any third party or user defined system -- see :ref:`cache_plugins`. + +In addition to being available on the ``<%page>`` tag, the caching flag and all +its options can be used with the ``<%def>`` tag as well: + +.. sourcecode:: mako + + <%def name="mycomp" cached="True" cache_timeout="60"> + other text + + +... and equivalently with the ``<%block>`` tag, anonymous or named: + +.. sourcecode:: mako + + <%block cached="True" cache_timeout="60"> + other text + + +Cache Arguments +=============== + +Mako has two cache arguments available on tags that are +available in all cases. The rest of the arguments +available are specific to a backend. + +The two generic tags arguments are: + +* ``cached="True"`` - enable caching for this ``<%page>``, + ``<%def>``, or ``<%block>``. +* ``cache_key`` - the "key" used to uniquely identify this content + in the cache. Usually, this key is chosen automatically + based on the name of the rendering callable (i.e. ``body`` + when used in ``<%page>``, the name of the def when using ``<%def>``, + the explicit or internally-generated name when using ``<%block>``). + Using the ``cache_key`` parameter, the key can be overridden + using a fixed or programmatically generated value. + + For example, here's a page + that caches any page which inherits from it, based on the + filename of the calling template: + + .. 
sourcecode:: mako + + <%page cached="True" cache_key="${self.filename}"/> + + ${next.body()} + + ## rest of template + +On a :class:`.Template` or :class:`.TemplateLookup`, the +caching can be configured using these arguments: + +* ``cache_enabled`` - Setting this + to ``False`` will disable all caching functionality + when the template renders. Defaults to ``True``. + e.g.: + + .. sourcecode:: python + + lookup = TemplateLookup( + directories='/path/to/templates', + cache_enabled = False + ) + +* ``cache_impl`` - The string name of the cache backend + to use. This defaults to ``'beaker'``, which has historically + been the only cache backend supported by Mako. + + .. versionadded:: 0.6.0 + + For example, here's how to use the upcoming + `dogpile.cache `_ + backend: + + .. sourcecode:: python + + lookup = TemplateLookup( + directories='/path/to/templates', + cache_impl = 'dogpile.cache', + cache_args = {'regions':my_dogpile_regions} + ) + +* ``cache_args`` - A dictionary of cache parameters that + will be consumed by the cache backend. See + :ref:`beaker_backend` for examples. + + .. versionadded:: 0.6.0 + +Backend-Specific Cache Arguments +-------------------------------- + +The ``<%page>``, ``<%def>``, and ``<%block>`` tags +accept any named argument that starts with the prefix ``"cache_"``. +Those arguments are then packaged up and passed along to the +underlying caching implementation, minus the ``"cache_"`` prefix. + +The actual arguments understood are determined by the backend. + +* :ref:`beaker_backend` - Includes arguments understood by + Beaker. +* :ref:`dogpile.cache_backend` - Includes arguments understood by + dogpile.cache. + +.. _beaker_backend: + +Using the Beaker Cache Backend +------------------------------ + +When using Beaker, new implementations will want to make usage +of **cache regions** so that cache configurations can be maintained +externally to templates. These configurations live under +named "regions" that can be referred to within templates themselves. + +.. versionadded:: 0.6.0 + Support for Beaker cache regions. + +For example, suppose we would like two regions. One is a "short term" +region that will store content in a memory-based dictionary, +expiring after 60 seconds. The other is a Memcached region, +where values should expire in five minutes. To configure +our :class:`.TemplateLookup`, first we get a handle to a +:class:`beaker.cache.CacheManager`: + +.. sourcecode:: python + + from beaker.cache import CacheManager + + manager = CacheManager(cache_regions={ + 'short_term':{ + 'type': 'memory', + 'expire': 60 + }, + 'long_term':{ + 'type': 'ext:memcached', + 'url': '127.0.0.1:11211', + 'expire': 300 + } + }) + + lookup = TemplateLookup( + directories=['/path/to/templates'], + module_directory='/path/to/modules', + cache_impl='beaker', + cache_args={ + 'manager':manager + } + ) + +Our templates can then opt to cache data in one of either region, +using the ``cache_region`` argument. Such as using ``short_term`` +at the ``<%page>`` level: + +.. sourcecode:: mako + + <%page cached="True" cache_region="short_term"> + + ## ... + +Or, ``long_term`` at the ``<%block>`` level: + +.. sourcecode:: mako + + <%block name="header" cached="True" cache_region="long_term"> + other text + + +The Beaker backend also works without regions. There are a +variety of arguments that can be passed to the ``cache_args`` +dictionary, which are also allowable in templates via the +``<%page>``, ``<%block>``, +and ``<%def>`` tags specific to those sections. 
The values +given override those specified at the :class:`.TemplateLookup` +or :class:`.Template` level. + +With the possible exception +of ``cache_timeout``, these arguments are probably better off +staying at the template configuration level. Each argument +specified as ``cache_XYZ`` in a template tag is specified +without the ``cache_`` prefix in the ``cache_args`` dictionary: + +* ``cache_timeout`` - number of seconds in which to invalidate the + cached data. After this timeout, the content is re-generated + on the next call. Available as ``timeout`` in the ``cache_args`` + dictionary. +* ``cache_type`` - type of caching. ``'memory'``, ``'file'``, ``'dbm'``, or + ``'ext:memcached'`` (note that the string ``memcached`` is + also accepted by the dogpile.cache Mako plugin, though not by Beaker itself). + Available as ``type`` in the ``cache_args`` dictionary. +* ``cache_url`` - (only used for ``memcached`` but required) a single + IP address or a semi-colon separated list of IP address of + memcache servers to use. Available as ``url`` in the ``cache_args`` + dictionary. +* ``cache_dir`` - in the case of the ``'file'`` and ``'dbm'`` cache types, + this is the filesystem directory with which to store data + files. If this option is not present, the value of + ``module_directory`` is used (i.e. the directory where compiled + template modules are stored). If neither option is available + an exception is thrown. Available as ``dir`` in the + ``cache_args`` dictionary. + +.. _dogpile.cache_backend: + +Using the dogpile.cache Backend +------------------------------- + +`dogpile.cache`_ is a new replacement for Beaker. It provides +a modernized, slimmed down interface and is generally easier to use +than Beaker. As of this writing it has not yet been released. dogpile.cache +includes its own Mako cache plugin -- see :mod:`dogpile.cache.plugins.mako_cache` in the +dogpile.cache documentation. + +Programmatic Cache Access +========================= + +The :class:`.Template`, as well as any template-derived :class:`.Namespace`, has +an accessor called ``cache`` which returns the :class:`.Cache` object +for that template. This object is a facade on top of the underlying +:class:`.CacheImpl` object, and provides some very rudimental +capabilities, such as the ability to get and put arbitrary +values: + +.. sourcecode:: mako + + <% + local.cache.set("somekey", type="memory", "somevalue") + %> + +Above, the cache associated with the ``local`` namespace is +accessed and a key is placed within a memory cache. + +More commonly, the ``cache`` object is used to invalidate cached +sections programmatically: + +.. sourcecode:: python + + template = lookup.get_template('/sometemplate.html') + + # invalidate the "body" of the template + template.cache.invalidate_body() + + # invalidate an individual def + template.cache.invalidate_def('somedef') + + # invalidate an arbitrary key + template.cache.invalidate('somekey') + +You can access any special method or attribute of the :class:`.CacheImpl` +itself using the :attr:`impl <.Cache.impl>` attribute: + +.. sourcecode:: python + + template.cache.impl.do_something_special() + +Note that using implementation-specific methods will mean you can't +swap in a different kind of :class:`.CacheImpl` implementation at a +later time. + +.. _cache_plugins: + +Cache Plugins +============= + +The mechanism used by caching can be plugged in +using a :class:`.CacheImpl` subclass. This class implements +the rudimental methods Mako needs to implement the caching +API. 
Mako includes the :class:`.BeakerCacheImpl` class to +provide the default implementation. A :class:`.CacheImpl` class +is acquired by Mako using a ``pkg_resources`` entrypoint, using +the name given as the ``cache_impl`` argument to :class:`.Template` +or :class:`.TemplateLookup`. This entry point can be +installed via the standard `setuptools`/``setup()`` procedure, underneath +the `EntryPoint` group named ``"mako.cache"``. It can also be +installed at runtime via a convenience installer :func:`.register_plugin` +which accomplishes essentially the same task. + +An example plugin that implements a local dictionary cache: + +.. sourcecode:: python + + from mako.cache import Cacheimpl, register_plugin + + class SimpleCacheImpl(CacheImpl): + def __init__(self, cache): + super(SimpleCacheImpl, self).__init__(cache) + self._cache = {} + + def get_or_create(self, key, creation_function, **kw): + if key in self._cache: + return self._cache[key] + else: + self._cache[key] = value = creation_function() + return value + + def set(self, key, value, **kwargs): + self._cache[key] = value + + def get(self, key, **kwargs): + return self._cache.get(key) + + def invalidate(self, key, **kwargs): + self._cache.pop(key, None) + + # optional - register the class locally + register_plugin("simple", __name__, "SimpleCacheImpl") + +Enabling the above plugin in a template would look like: + +.. sourcecode:: python + + t = Template("mytemplate", + file="mytemplate.html", + cache_impl='simple') + +Guidelines for Writing Cache Plugins +------------------------------------ + +* The :class:`.CacheImpl` is created on a per-:class:`.Template` basis. The + class should ensure that only data for the parent :class:`.Template` is + persisted or returned by the cache methods. The actual :class:`.Template` + is available via the ``self.cache.template`` attribute. The ``self.cache.id`` + attribute, which is essentially the unique modulename of the template, is + a good value to use in order to represent a unique namespace of keys specific + to the template. +* Templates only use the :meth:`.CacheImpl.get_or_create()` method + in an implicit fashion. The :meth:`.CacheImpl.set`, + :meth:`.CacheImpl.get`, and :meth:`.CacheImpl.invalidate` methods are + only used in response to direct programmatic access to the corresponding + methods on the :class:`.Cache` object. +* :class:`.CacheImpl` will be accessed in a multithreaded fashion if the + :class:`.Template` itself is used multithreaded. Care should be taken + to ensure caching implementations are threadsafe. +* A library like `Dogpile `_, which + is a minimal locking system derived from Beaker, can be used to help + implement the :meth:`.CacheImpl.get_or_create` method in a threadsafe + way that can maximize effectiveness across multiple threads as well + as processes. :meth:`.CacheImpl.get_or_create` is the + key method used by templates. +* All arguments passed to ``**kw`` come directly from the parameters + inside the ``<%def>``, ``<%block>``, or ``<%page>`` tags directly, + minus the ``"cache_"`` prefix, as strings, with the exception of + the argument ``cache_timeout``, which is passed to the plugin + as the name ``timeout`` with the value converted to an integer. + Arguments present in ``cache_args`` on :class:`.Template` or + :class:`.TemplateLookup` are passed directly, but are superseded + by those present in the most specific template tag. 
+* The directory where :class:`.Template` places module files can + be acquired using the accessor ``self.cache.template.module_directory``. + This directory can be a good place to throw cache-related work + files, underneath a prefix like ``_my_cache_work`` so that name + conflicts with generated modules don't occur. + +API Reference +============= + +.. autoclass:: mako.cache.Cache + :members: + :show-inheritance: + +.. autoclass:: mako.cache.CacheImpl + :members: + :show-inheritance: + +.. autofunction:: mako.cache.register_plugin + +.. autoclass:: mako.ext.beaker_cache.BeakerCacheImpl + :members: + :show-inheritance: + diff --git a/lib3/Mako-0.7.3/doc/_sources/defs.txt b/lib3/Mako-0.7.3/doc/_sources/defs.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/defs.txt @@ -0,0 +1,622 @@ +.. _defs_toplevel: + +=============== +Defs and Blocks +=============== + +``<%def>`` and ``<%block>`` are two tags that both demarcate any block of text +and/or code. They both exist within generated Python as a callable function, +i.e., a Python ``def``. They differ in their scope and calling semantics. +Whereas ``<%def>`` provides a construct that is very much like a named Python +``def``, the ``<%block>`` is more layout oriented. + +Using Defs +========== + +The ``<%def>`` tag requires a ``name`` attribute, where the ``name`` references +a Python function signature: + +.. sourcecode:: mako + + <%def name="hello()"> + hello world + + +To invoke the ``<%def>``, it is normally called as an expression: + +.. sourcecode:: mako + + the def: ${hello()} + +If the ``<%def>`` is not nested inside of another ``<%def>``, +it's known as a **top level def** and can be accessed anywhere in +the template, including above where it was defined. + +All defs, top level or not, have access to the current +contextual namespace in exactly the same way their containing +template does. Suppose the template below is executed with the +variables ``username`` and ``accountdata`` inside the context: + +.. sourcecode:: mako + + Hello there ${username}, how are ya. Lets see what your account says: + + ${account()} + + <%def name="account()"> + Account for ${username}:
+
+        % for row in accountdata:
+            Value: ${row}<br/>
+ % endfor + + +The ``username`` and ``accountdata`` variables are present +within the main template body as well as the body of the +``account()`` def. + +Since defs are just Python functions, you can define and pass +arguments to them as well: + +.. sourcecode:: mako + + ${account(accountname='john')} + + <%def name="account(accountname, type='regular')"> + account name: ${accountname}, type: ${type} + + +When you declare an argument signature for your def, they are +required to follow normal Python conventions (i.e., all +arguments are required except keyword arguments with a default +value). This is in contrast to using context-level variables, +which evaluate to ``UNDEFINED`` if you reference a name that +does not exist. + +Calling Defs from Other Files +----------------------------- + +Top level ``<%def>``\ s are **exported** by your template's +module, and can be called from the outside; including from other +templates, as well as normal Python code. Calling a ``<%def>`` +from another template is something like using an ``<%include>`` +-- except you are calling a specific function within the +template, not the whole template. + +The remote ``<%def>`` call is also a little bit like calling +functions from other modules in Python. There is an "import" +step to pull the names from another template into your own +template; then the function or functions are available. + +To import another template, use the ``<%namespace>`` tag: + +.. sourcecode:: mako + + <%namespace name="mystuff" file="mystuff.html"/> + +The above tag adds a local variable ``mystuff`` to the current +scope. + +Then, just call the defs off of ``mystuff``: + +.. sourcecode:: mako + + ${mystuff.somedef(x=5,y=7)} + +The ``<%namespace>`` tag also supports some of the other +semantics of Python's ``import`` statement, including pulling +names into the local variable space, or using ``*`` to represent +all names, using the ``import`` attribute: + +.. sourcecode:: mako + + <%namespace file="mystuff.html" import="foo, bar"/> + +This is just a quick intro to the concept of a **namespace**, +which is a central Mako concept that has its own chapter in +these docs. For more detail and examples, see +:ref:`namespaces_toplevel`. + +Calling Defs Programmatically +----------------------------- + +You can call defs programmatically from any :class:`.Template` object +using the :meth:`~.Template.get_def()` method, which returns a :class:`.DefTemplate` +object. This is a :class:`.Template` subclass which the parent +:class:`.Template` creates, and is usable like any other template: + +.. sourcecode:: python + + from mako.template import Template + + template = Template(""" + <%def name="hi(name)"> + hi ${name}! + + + <%def name="bye(name)"> + bye ${name}! + + """) + + print template.get_def("hi").render(name="ed") + print template.get_def("bye").render(name="ed") + +Defs within Defs +---------------- + +The def model follows regular Python rules for closures. +Declaring ``<%def>`` inside another ``<%def>`` declares it +within the parent's **enclosing scope**: + +.. sourcecode:: mako + + <%def name="mydef()"> + <%def name="subdef()"> + a sub def + + + i'm the def, and the subcomponent is ${subdef()} + + +Just like Python, names that exist outside the inner ``<%def>`` +exist inside it as well: + +.. 
sourcecode:: mako + + <% + x = 12 + %> + <%def name="outer()"> + <% + y = 15 + %> + <%def name="inner()"> + inner, x is ${x}, y is ${y} + + + outer, x is ${x}, y is ${y} + + +Assigning to a name inside of a def declares that name as local +to the scope of that def (again, like Python itself). This means +the following code will raise an error: + +.. sourcecode:: mako + + <% + x = 10 + %> + <%def name="somedef()"> + ## error ! + somedef, x is ${x} + <% + x = 27 + %> + + +...because the assignment to ``x`` declares ``x`` as local to the +scope of ``somedef``, rendering the "outer" version unreachable +in the expression that tries to render it. + +.. _defs_with_content: + +Calling a Def with Embedded Content and/or Other Defs +----------------------------------------------------- + +A flip-side to def within def is a def call with content. This +is where you call a def, and at the same time declare a block of +content (or multiple blocks) that can be used by the def being +called. The main point of such a call is to create custom, +nestable tags, just like any other template language's +custom-tag creation system -- where the external tag controls the +execution of the nested tags and can communicate state to them. +Only with Mako, you don't have to use any external Python +modules, you can define arbitrarily nestable tags right in your +templates. + +To achieve this, the target def is invoked using the form +``<%namepacename:defname>`` instead of the normal ``${}`` +syntax. This syntax, introduced in Mako 0.2.3, is functionally +equivalent to another tag known as ``%call``, which takes the form +``<%call expr='namespacename.defname(args)'>``. While ``%call`` +is available in all versions of Mako, the newer style is +probably more familiar looking. The ``namespace`` portion of the +call is the name of the **namespace** in which the def is +defined -- in the most simple cases, this can be ``local`` or +``self`` to reference the current template's namespace (the +difference between ``local`` and ``self`` is one of inheritance +-- see :ref:`namespaces_builtin` for details). + +When the target def is invoked, a variable ``caller`` is placed +in its context which contains another namespace containing the +body and other defs defined by the caller. The body itself is +referenced by the method ``body()``. Below, we build a ``%def`` +that operates upon ``caller.body()`` to invoke the body of the +custom tag: + +.. sourcecode:: mako + + <%def name="buildtable()"> + + +
+        <table>
+            <tr><td>
+                ${caller.body()}
+            </td></tr>
+        </table>
+    </%def>
+
+    <%self:buildtable>
+        I am the table body.
+    </%self:buildtable>
+
+This produces the output (whitespace formatted):
+
+.. sourcecode:: html
+
+    <table>
+        <tr><td>
+            I am the table body.
+        </td></tr>
+    </table>
+
+Using the older ``%call`` syntax looks like:
+
+.. sourcecode:: mako
+
+    <%def name="buildtable()">
+        <table>
+            <tr><td>
+                ${caller.body()}
+            </td></tr>
+        </table>
+    </%def>
+
+    <%call expr="buildtable()">
+        I am the table body.
+    </%call>
+
+The ``body()`` can be executed multiple times or not at all.
+This means you can use def-call-with-content to build iterators,
+conditionals, etc:
+
+.. sourcecode:: mako
+
+    <%def name="lister(count)">
+        % for x in range(count):
+            ${caller.body()}
+        % endfor
+    </%def>
+
+    <%self:lister count="${3}">
+        hi
+    </%self:lister>
+
+Produces:
+
+.. sourcecode:: html
+
+    hi
+    hi
+    hi
+
+Notice above we pass ``3`` as a Python expression, so that it
+remains as an integer.
+
+A custom "conditional" tag:
+
+.. sourcecode:: mako
+
+    <%def name="conditional(expression)">
+        % if expression:
+            ${caller.body()}
+        % endif
+    </%def>
+
+    <%self:conditional expression="${4==4}">
+        i'm the result
+    </%self:conditional>
+
+Produces:
+
+.. sourcecode:: html
+
+    i'm the result
+
+But that's not all. The ``body()`` function also can handle
+arguments, which will augment the local namespace of the body
+callable. The caller must define the arguments which it expects
+to receive from its target def using the ``args`` attribute,
+which is a comma-separated list of argument names. Below, our
+``<%def>`` calls the ``body()`` of its caller, passing in an
+element of data from its argument:
+
+.. sourcecode:: mako
+
+    <%def name="layoutdata(somedata)">
+        <table>
+        % for item in somedata:
+            <tr>
+            % for col in item:
+                <td>${caller.body(col=col)}</td>
+            % endfor
+            </tr>
+        % endfor
+        </table>
+    </%def>
+
+    <%self:layoutdata somedata="${[[1,2,3],[4,5,6],[7,8,9]]}" args="col">\
+    Body data: ${col}\
+    </%self:layoutdata>
+
+Produces:
+
+.. sourcecode:: html
+
+    <table>
+        <tr>
+            <td>Body data: 1</td>
+            <td>Body data: 2</td>
+            <td>Body data: 3</td>
+        </tr>
+        <tr>
+            <td>Body data: 4</td>
+            <td>Body data: 5</td>
+            <td>Body data: 6</td>
+        </tr>
+        <tr>
+            <td>Body data: 7</td>
+            <td>Body data: 8</td>
+            <td>Body data: 9</td>
+        </tr>
+    </table>
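
The def-with-content pattern above can also be exercised directly from Python. The following minimal sketch (the template text is invented here for illustration and only mirrors the ``lister`` example; it is not part of the vendored documentation) renders such a template through the public ``mako.template.Template`` API:

.. sourcecode:: python

    from mako.template import Template

    # The "lister" def repeats whatever body its caller supplies, invoked
    # with the <%namespacename:defname> calling syntax described above.
    template_text = """
    <%def name="lister(count)">
    % for x in range(count):
    ${caller.body()}
    % endfor
    </%def>

    <%self:lister count="${3}">
    hi
    </%self:lister>
    """

    print(Template(template_text).render())
    # prints "hi" three times, with surrounding whitespace
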
+ +You don't have to stick to calling just the ``body()`` function. +The caller can define any number of callables, allowing the +``<%call>`` tag to produce whole layouts: + +.. sourcecode:: mako + + <%def name="layout()"> + ## a layout def +
+
+ ${caller.header()} +
+ + + +
+ ${caller.body()} +
+
+ + + ## calls the layout def + <%self:layout> + <%def name="header()"> + I am the header + + <%def name="sidebar()"> +
+            <ul>
+                <li>sidebar 1</li>
+                <li>sidebar 2</li>
+            </ul>
+        </%def>
+ + + this is the body + + +The above layout would produce: + +.. sourcecode:: html + +
+
+ I am the header +
+ + + +
+ this is the body +
+
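
As a Python-level companion to the layout example above, here is a minimal runnable sketch (the template text is invented for illustration, not taken from the documentation) showing a caller supplying both a named callable and a body to a layout def:

.. sourcecode:: python

    from mako.template import Template

    # The layout def pulls a named callable ("header") and the body
    # from the "caller" namespace supplied at call time.
    template_text = """
    <%def name="layout()">
    header: ${caller.header()}
    body: ${caller.body()}
    </%def>

    <%self:layout>
    <%def name="header()">my title</%def>
    my content
    </%self:layout>
    """

    print(Template(template_text).render())
    # header: my title
    # body: my content   (plus surrounding whitespace)
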
+ +The number of things you can do with ``<%call>`` and/or the +``<%namespacename:defname>`` calling syntax is enormous. You can +create form widget libraries, such as an enclosing ``
<form>`` +tag and nested HTML input elements, or portable wrapping schemes +using ``<div>
`` or other elements. You can create tags that +interpret rows of data, such as from a database, providing the +individual columns of each row to a ``body()`` callable which +lays out the row any way it wants. Basically anything you'd do +with a "custom tag" or tag library in some other system, Mako +provides via ``<%def>`` tags and plain Python callables which are +invoked via ``<%namespacename:defname>`` or ``<%call>``. + +.. _blocks: + +Using Blocks +============ + +The ``<%block>`` tag introduces some new twists on the +``<%def>`` tag which make it more closely tailored towards layout. + +.. versionadded:: 0.4.1 + +An example of a block: + +.. sourcecode:: mako + + + + <%block> + this is a block. + + + + +In the above example, we define a simple block. The block renders its content in the place +that it's defined. Since the block is called for us, it doesn't need a name and the above +is referred to as an **anonymous block**. So the output of the above template will be: + +.. sourcecode:: html + + + + this is a block. + + + +So in fact the above block has absolutely no effect. Its usefulness comes when we start +using modifiers. Such as, we can apply a filter to our block: + +.. sourcecode:: mako + + + + <%block filter="h"> + this is some escaped html. + + + + +or perhaps a caching directive: + +.. sourcecode:: mako + + + + <%block cached="True" cache_timeout="60"> + This content will be cached for 60 seconds. + + + + +Blocks also work in iterations, conditionals, just like defs: + +.. sourcecode:: mako + + % if some_condition: + <%block>condition is met + % endif + +While the block renders at the point it is defined in the template, +the underlying function is present in the generated Python code only +once, so there's no issue with placing a block inside of a loop or +similar. Anonymous blocks are defined as closures in the local +rendering body, so have access to local variable scope: + +.. sourcecode:: mako + + % for i in range(1, 4): + <%block>i is ${i} + % endfor + +Using Named Blocks +------------------ + +Possibly the more important area where blocks are useful is when we +do actually give them names. Named blocks are tailored to behave +somewhat closely to Jinja2's block tag, in that they define an area +of a layout which can be overridden by an inheriting template. In +sharp contrast to the ``<%def>`` tag, the name given to a block is +global for the entire template regardless of how deeply it's nested: + +.. sourcecode:: mako + + + <%block name="header"> + + + <%block name="title">Title</%block> + + + + + ${next.body()} + + + +The above example has two named blocks "``header``" and "``title``", both of which can be referred to +by an inheriting template. A detailed walkthrough of this usage can be found at :ref:`inheritance_toplevel`. + +Note above that named blocks don't have any argument declaration the way defs do. They still implement themselves +as Python functions, however, so they can be invoked additional times beyond their initial definition: + +.. sourcecode:: mako + +
+        <%block name="pagecontrol">
+            <a href="">previous page</a> |
+            <a href="">next page</a>
+        </%block>
+
+        <table id="content">
+            ## some content
+        </table>
+
+        ${pagecontrol()}
+    </div>
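
A short Python sketch of the same idea (invented template text, assumed names): a named block renders where it is defined, and the function it generates can be invoked again later like an ordinary def:

.. sourcecode:: python

    from mako.template import Template

    # The named block renders in place, and the generated callable
    # ("greeting") can be invoked again further down the template.
    template_text = """
    <%block name="greeting">hello</%block>
    ...page content...
    ${greeting()}
    """

    print(Template(template_text).render())
    # hello
    # ...page content...
    # hello
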
+ +The content referenced by ``pagecontrol`` above will be rendered both above and below the ```` tags. + +To keep things sane, named blocks have restrictions that defs do not: + +* The ``<%block>`` declaration cannot have any argument signature. +* The name of a ``<%block>`` can only be defined once in a template -- an error is raised if two blocks of the same + name occur anywhere in a single template, regardless of nesting. A similar error is raised if a top level def + shares the same name as that of a block. +* A named ``<%block>`` cannot be defined within a ``<%def>``, or inside the body of a "call", i.e. + ``<%call>`` or ``<%namespacename:defname>`` tag. Anonymous blocks can, however. + +Using Page Arguments in Named Blocks +------------------------------------ + +A named block is very much like a top level def. It has a similar +restriction to these types of defs in that arguments passed to the +template via the ``<%page>`` tag aren't automatically available. +Using arguments with the ``<%page>`` tag is described in the section +:ref:`namespaces_body`, and refers to scenarios such as when the +``body()`` method of a template is called from an inherited template passing +arguments, or the template is invoked from an ``<%include>`` tag +with arguments. To allow a named block to share the same arguments +passed to the page, the ``args`` attribute can be used: + +.. sourcecode:: mako + + <%page args="post"/> + + + + + <%block name="post_prose" args="post"> + ${post.content} + + + +Where above, if the template is called via a directive like +``<%include file="post.mako" args="post=post" />``, the ``post`` +variable is available both in the main body as well as the +``post_prose`` block. + +Similarly, the ``**pageargs`` variable is present, in named blocks only, +for those arguments not explicit in the ``<%page>`` tag: + +.. sourcecode:: mako + + <%block name="post_prose"> + ${pageargs['post'].content} + + +The ``args`` attribute is only allowed with named blocks. With +anonymous blocks, the Python function is always rendered in the same +scope as the call itself, so anything available directly outside the +anonymous block is available inside as well. diff --git a/lib3/Mako-0.7.3/doc/_sources/filtering.txt b/lib3/Mako-0.7.3/doc/_sources/filtering.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/filtering.txt @@ -0,0 +1,344 @@ +.. _filtering_toplevel: + +======================= +Filtering and Buffering +======================= + +Expression Filtering +==================== + +As described in the chapter :ref:`syntax_toplevel`, the "``|``" operator can be +applied to a "``${}``" expression to apply escape filters to the +output: + +.. sourcecode:: mako + + ${"this is some text" | u} + +The above expression applies URL escaping to the expression, and +produces ``this+is+some+text``. + +The built-in escape flags are: + +* ``u`` : URL escaping, provided by + ``urllib.quote_plus(string.encode('utf-8'))`` +* ``h`` : HTML escaping, provided by + ``markupsafe.escape(string)`` + + .. versionadded:: 0.3.4 + Prior versions use ``cgi.escape(string, True)``. 
+ +* ``x`` : XML escaping +* ``trim`` : whitespace trimming, provided by ``string.strip()`` +* ``entity`` : produces HTML entity references for applicable + strings, derived from ``htmlentitydefs`` +* ``unicode`` (``str`` on Python 3): produces a Python unicode + string (this function is applied by default) +* ``decode.``: decode input into a Python + unicode with the specified encoding +* ``n`` : disable all default filtering; only filters specified + in the local expression tag will be applied. + +To apply more than one filter, separate them by a comma: + +.. sourcecode:: mako + + ${" some value " | h,trim} + +The above produces ``<tag>some value</tag>``, with +no leading or trailing whitespace. The HTML escaping function is +applied first, the "trim" function second. + +Naturally, you can make your own filters too. A filter is just a +Python function that accepts a single string argument, and +returns the filtered result. The expressions after the ``|`` +operator draw upon the local namespace of the template in which +they appear, meaning you can define escaping functions locally: + +.. sourcecode:: mako + + <%! + def myescape(text): + return "" + text + "" + %> + + Here's some tagged text: ${"text" | myescape} + +Or from any Python module: + +.. sourcecode:: mako + + <%! + import myfilters + %> + + Here's some tagged text: ${"text" | myfilters.tagfilter} + +A page can apply a default set of filters to all expression tags +using the ``expression_filter`` argument to the ``%page`` tag: + +.. sourcecode:: mako + + <%page expression_filter="h"/> + + Escaped text: ${"some html"} + +Result: + +.. sourcecode:: html + + Escaped text: <html>some html</html> + +.. _filtering_default_filters: + +The ``default_filters`` Argument +-------------------------------- + +In addition to the ``expression_filter`` argument, the +``default_filters`` argument to both :class:`.Template` and +:class:`.TemplateLookup` can specify filtering for all expression tags +at the programmatic level. This array-based argument, when given +its default argument of ``None``, will be internally set to +``["unicode"]`` (or ``["str"]`` on Python 3), except when +``disable_unicode=True`` is set in which case it defaults to +``["str"]``: + +.. sourcecode:: python + + t = TemplateLookup(directories=['/tmp'], default_filters=['unicode']) + +To replace the usual ``unicode``/``str`` function with a +specific encoding, the ``decode`` filter can be substituted: + +.. sourcecode:: python + + t = TemplateLookup(directories=['/tmp'], default_filters=['decode.utf8']) + +To disable ``default_filters`` entirely, set it to an empty +list: + +.. sourcecode:: python + + t = TemplateLookup(directories=['/tmp'], default_filters=[]) + +Any string name can be added to ``default_filters`` where it +will be added to all expressions as a filter. The filters are +applied from left to right, meaning the leftmost filter is +applied first. + +.. sourcecode:: python + + t = Template(templatetext, default_filters=['unicode', 'myfilter']) + +To ease the usage of ``default_filters`` with custom filters, +you can also add imports (or other code) to all templates using +the ``imports`` argument: + +.. sourcecode:: python + + t = TemplateLookup(directories=['/tmp'], + default_filters=['unicode', 'myfilter'], + imports=['from mypackage import myfilter']) + +The above will generate templates something like this: + +.. sourcecode:: python + + # .... 
+ from mypackage import myfilter + + def render_body(context): + context.write(myfilter(unicode("some text"))) + +Turning off Filtering with the ``n`` Filter +------------------------------------------- + +In all cases the special ``n`` filter, used locally within an +expression, will **disable** all filters declared in the +``<%page>`` tag as well as in ``default_filters``. Such as: + +.. sourcecode:: mako + + ${'myexpression' | n} + +will render ``myexpression`` with no filtering of any kind, and: + +.. sourcecode:: mako + + ${'myexpression' | n,trim} + +will render ``myexpression`` using the ``trim`` filter only. + +Filtering Defs and Blocks +========================= + +The ``%def`` and ``%block`` tags have an argument called ``filter`` which will apply the +given list of filter functions to the output of the ``%def``: + +.. sourcecode:: mako + + <%def name="foo()" filter="h, trim"> + this is bold + + +When the ``filter`` attribute is applied to a def as above, the def +is automatically **buffered** as well. This is described next. + +Buffering +========= + +One of Mako's central design goals is speed. To this end, all of +the textual content within a template and its various callables +is by default piped directly to the single buffer that is stored +within the :class:`.Context` object. While this normally is easy to +miss, it has certain side effects. The main one is that when you +call a def using the normal expression syntax, i.e. +``${somedef()}``, it may appear that the return value of the +function is the content it produced, which is then delivered to +your template just like any other expression substitution, +except that normally, this is not the case; the return value of +``${somedef()}`` is simply the empty string ``''``. By the time +you receive this empty string, the output of ``somedef()`` has +been sent to the underlying buffer. + +You may not want this effect, if for example you are doing +something like this: + +.. sourcecode:: mako + + ${" results " + somedef() + " more results "} + +If the ``somedef()`` function produced the content "``somedef's +results``", the above template would produce this output: + +.. sourcecode:: html + + somedef's results results more results + +This is because ``somedef()`` fully executes before the +expression returns the results of its concatenation; the +concatenation in turn receives just the empty string as its +middle expression. + +Mako provides two ways to work around this. One is by applying +buffering to the ``%def`` itself: + +.. sourcecode:: mako + + <%def name="somedef()" buffered="True"> + somedef's results + + +The above definition will generate code similar to this: + +.. sourcecode:: python + + def somedef(): + context.push_buffer() + try: + context.write("somedef's results") + finally: + buf = context.pop_buffer() + return buf.getvalue() + +So that the content of ``somedef()`` is sent to a second buffer, +which is then popped off the stack and its value returned. The +speed hit inherent in buffering the output of a def is also +apparent. + +Note that the ``filter`` argument on ``%def`` also causes the def to +be buffered. This is so that the final content of the ``%def`` can +be delivered to the escaping function in one batch, which +reduces method calls and also produces more deterministic +behavior for the filtering function itself, which can possibly +be useful for a filtering function that wishes to apply a +transformation to the text as a whole. 
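
To make the buffering behaviour discussed above concrete, here is a small runnable sketch (the template text is invented for illustration): with ``buffered="True"`` the def returns its content as a string, so it composes correctly inside a concatenation expression:

.. sourcecode:: python

    from mako.template import Template

    # Because the def is buffered, somedef() returns its output rather than
    # writing it straight to the context buffer, so the concatenation below
    # produces the pieces in the expected order.
    template_text = """
    <%def name="somedef()" buffered="True">somedef's results</%def>
    ${" results " + somedef() + " more results "}
    """

    print(Template(template_text).render())
    # " results somedef's results more results "  (modulo newlines)
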
+ +The other way to buffer the output of a def or any Mako callable +is by using the built-in ``capture`` function. This function +performs an operation similar to the above buffering operation +except it is specified by the caller. + +.. sourcecode:: mako + + ${" results " + capture(somedef) + " more results "} + +Note that the first argument to the ``capture`` function is +**the function itself**, not the result of calling it. This is +because the ``capture`` function takes over the job of actually +calling the target function, after setting up a buffered +environment. To send arguments to the function, just send them +to ``capture`` instead: + +.. sourcecode:: mako + + ${capture(somedef, 17, 'hi', use_paging=True)} + +The above call is equivalent to the unbuffered call: + +.. sourcecode:: mako + + ${somedef(17, 'hi', use_paging=True)} + +Decorating +========== + +.. versionadded:: 0.2.5 + +Somewhat like a filter for a ``%def`` but more flexible, the ``decorator`` +argument to ``%def`` allows the creation of a function that will +work in a similar manner to a Python decorator. The function can +control whether or not the function executes. The original +intent of this function is to allow the creation of custom cache +logic, but there may be other uses as well. + +``decorator`` is intended to be used with a regular Python +function, such as one defined in a library module. Here we'll +illustrate the python function defined in the template for +simplicities' sake: + +.. sourcecode:: mako + + <%! + def bar(fn): + def decorate(context, *args, **kw): + context.write("BAR") + fn(*args, **kw) + context.write("BAR") + return '' + return decorate + %> + + <%def name="foo()" decorator="bar"> + this is foo + + + ${foo()} + +The above template will return, with more whitespace than this, +``"BAR this is foo BAR"``. The function is the render callable +itself (or possibly a wrapper around it), and by default will +write to the context. To capture its output, use the :func:`.capture` +callable in the ``mako.runtime`` module (available in templates +as just ``runtime``): + +.. sourcecode:: mako + + <%! + def bar(fn): + def decorate(context, *args, **kw): + return "BAR" + runtime.capture(context, fn, *args, **kw) + "BAR" + return decorate + %> + + <%def name="foo()" decorator="bar"> + this is foo + + + ${foo()} + +The decorator can be used with top-level defs as well as nested +defs, and blocks too. Note that when calling a top-level def from the +:class:`.Template` API, i.e. ``template.get_def('somedef').render()``, +the decorator has to write the output to the ``context``, i.e. +as in the first example. The return value gets discarded. diff --git a/lib3/Mako-0.7.3/doc/_sources/index.txt b/lib3/Mako-0.7.3/doc/_sources/index.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/index.txt @@ -0,0 +1,22 @@ +Table of Contents +================= + +.. toctree:: + :maxdepth: 2 + + usage + syntax + defs + runtime + namespaces + inheritance + filtering + unicode + caching + +Indices and Tables +------------------ + +* :ref:`genindex` +* :ref:`search` + diff --git a/lib3/Mako-0.7.3/doc/_sources/inheritance.txt b/lib3/Mako-0.7.3/doc/_sources/inheritance.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/inheritance.txt @@ -0,0 +1,534 @@ +.. _inheritance_toplevel: + +=========== +Inheritance +=========== + +.. note:: Most of the inheritance examples here take advantage of a feature that's + new in Mako as of version 0.4.1 called the "block". 
This tag is very similar to + the "def" tag but is more streamlined for usage with inheritance. Note that + all of the examples here which use blocks can also use defs instead. Contrasting + usages will be illustrated. + +Using template inheritance, two or more templates can organize +themselves into an **inheritance chain**, where content and +functions from all involved templates can be intermixed. The +general paradigm of template inheritance is this: if a template +``A`` inherits from template ``B``, then template ``A`` agrees +to send the executional control to template ``B`` at runtime +(``A`` is called the **inheriting** template). Template ``B``, +the **inherited** template, then makes decisions as to what +resources from ``A`` shall be executed. + +In practice, it looks like this. Here's a hypothetical inheriting +template, ``index.html``: + +.. sourcecode:: mako + + ## index.html + <%inherit file="base.html"/> + + <%block name="header"> + this is some header content + + + this is the body content. + +And ``base.html``, the inherited template: + +.. sourcecode:: mako + + ## base.html + + +
+     <html>
+         <body>
+             <div class="header">
+                 <%block name="header"/>
+             </div>
+ + ${self.body()} + + + + + +Here is a breakdown of the execution: + +#. When ``index.html`` is rendered, control immediately passes to + ``base.html``. +#. ``base.html`` then renders the top part of an HTML document, + then invokes the ``<%block name="header">`` block. It invokes the + underlying ``header()`` function off of a built-in namespace + called ``self`` (this namespace was first introduced in the + :doc:`Namespaces chapter ` in :ref:`namespace_self`). Since + ``index.html`` is the topmost template and also defines a block + called ``header``, it's this ``header`` block that ultimately gets + executed -- instead of the one that's present in ``base.html``. +#. Control comes back to ``base.html``. Some more HTML is + rendered. +#. ``base.html`` executes ``self.body()``. The ``body()`` + function on all template-based namespaces refers to the main + body of the template, therefore the main body of + ``index.html`` is rendered. +#. When ``<%block name="header">`` is encountered in ``index.html`` + during the ``self.body()`` call, a conditional is checked -- does the + current inherited template, i.e. ``base.html``, also define this block? If yes, + the ``<%block>`` is **not** executed here -- the inheritance + mechanism knows that the parent template is responsible for rendering + this block (and in fact it already has). In other words a block + only renders in its *basemost scope*. +#. Control comes back to ``base.html``. More HTML is rendered, + then the ``<%block name="footer">`` expression is invoked. +#. The ``footer`` block is only defined in ``base.html``, so being + the topmost definition of ``footer``, it's the one that + executes. If ``index.html`` also specified ``footer``, then + its version would **override** that of the base. +#. ``base.html`` finishes up rendering its HTML and the template + is complete, producing: + + .. sourcecode:: html + + + +
+     <html>
+         <body>
+             <div class="header">
+                 this is some header content
+             </div>
+ + this is the body content. + + + + + +...and that is template inheritance in a nutshell. The main idea +is that the methods that you call upon ``self`` always +correspond to the topmost definition of that method. Very much +the way ``self`` works in a Python class, even though Mako is +not actually using Python class inheritance to implement this +functionality. (Mako doesn't take the "inheritance" metaphor too +seriously; while useful to setup some commonly recognized +semantics, a textual template is not very much like an +object-oriented class construct in practice). + +Nesting Blocks +============== + +The named blocks defined in an inherited template can also be nested within +other blocks. The name given to each block is globally accessible via any inheriting +template. We can add a new block ``title`` to our ``header`` block: + +.. sourcecode:: mako + + ## base.html + + +
+     <html>
+         <body>
+             <div class="header">
+                 <%block name="header">
+                     <h2>
+                         <%block name="title"/>
+                     </h2>
+                 </%block>
+             </div>
+ + ${self.body()} + + + + + +The inheriting template can name either or both of ``header`` and ``title``, separately +or nested themselves: + +.. sourcecode:: mako + + ## index.html + <%inherit file="base.html"/> + + <%block name="header"> + this is some header content + ${parent.header()} + + + <%block name="title"> + this is the title + + + this is the body content. + +Note when we overrode ``header``, we added an extra call ``${parent.header()}`` in order to invoke +the parent's ``header`` block in addition to our own. That's described in more detail below, +in :ref:`parent_namespace`. + +Rendering a Named Block Multiple Times +====================================== + +Recall from the section :ref:`blocks` that a named block is just like a ``<%def>``, +with some different usage rules. We can call one of our named sections distinctly, for example +a section that is used more than once, such as the title of a page: + +.. sourcecode:: mako + + + + ${self.title()} + + + <%block name="header"> +

+         <h2>
+             <%block name="title"/>
+         </h2>
+     </%block>
+ + ${self.body()} + + + +Where above an inheriting template can define ``<%block name="title">`` just once, and it will be +used in the base template both in the ```` section as well as the ``<h2>``. + +But what about Defs? +==================== + +The previous example used the ``<%block>`` tag to produce areas of content +to be overridden. Before Mako 0.4.1, there wasn't any such tag -- instead +there was only the ``<%def>`` tag. As it turns out, named blocks and defs are +largely interchangeable. The def simply doesn't call itself automatically, +and has more open-ended naming and scoping rules that are more flexible and similar +to Python itself, but less suited towards layout. The first example from +this chapter using defs would look like: + +.. sourcecode:: mako + + ## index.html + <%inherit file="base.html"/> + + <%def name="header()"> + this is some header content + </%def> + + this is the body content. + +And ``base.html``, the inherited template: + +.. sourcecode:: mako + + ## base.html + <html> + <body> + <div class="header"> + ${self.header()} + </div> + + ${self.body()} + + <div class="footer"> + ${self.footer()} + </div> + </body> + </html> + + <%def name="header()"/> + <%def name="footer()"> + this is the footer + </%def> + +Above, we illustrate that defs differ from blocks in that their definition +and invocation are defined in two separate places, instead of at once. You can *almost* do exactly what a +block does if you put the two together: + +.. sourcecode:: mako + + <div class="header"> + <%def name="header()"></%def>${self.header()} + </div> + +The ``<%block>`` is obviously more streamlined than the ``<%def>`` for this kind +of usage. In addition, +the above "inline" approach with ``<%def>`` does not work with nesting: + +.. sourcecode:: mako + + <head> + <%def name="header()"> + <title> + ## this won't work ! + <%def name="title()">default title</%def>${self.title()} + + ${self.header()} + + +Where above, the ``title()`` def, because it's a def within a def, is not part of the +template's exported namespace and will not be part of ``self``. If the inherited template +did define its own ``title`` def at the top level, it would be called, but the "default title" +above is not present at all on ``self`` no matter what. For this to work as expected +you'd instead need to say: + +.. sourcecode:: mako + + + <%def name="header()"> + + ${self.title()} + + ${self.header()} + + <%def name="title()"/> + + +That is, ``title`` is defined outside of any other defs so that it is in the ``self`` namespace. +It works, but the definition needs to be potentially far away from the point of render. + +A named block is always placed in the ``self`` namespace, regardless of nesting, +so this restriction is lifted: + +.. sourcecode:: mako + + ## base.html + + <%block name="header"> + + <%block name="title"/> + + + + +The above template defines ``title`` inside of ``header``, and an inheriting template can define +one or both in **any** configuration, nested inside each other or not, in order for them to be used: + +.. sourcecode:: mako + + ## index.html + <%inherit file="base.html"/> + <%block name="title"> + the title + + <%block name="header"> + the header + + +So while the ``<%block>`` tag lifts the restriction of nested blocks not being available externally, +in order to achieve this it *adds* the restriction that all block names in a single template need +to be globally unique within the template, and additionally that a ``<%block>`` can't be defined +inside of a ``<%def>``. 
It's a more restricted tag suited towards a more specific use case than ``<%def>``.
+
+ Using the ``next`` Namespace to Produce Content Wrapping
+ ========================================================
+
+ Sometimes you have an inheritance chain that spans more than two
+ templates. Or maybe you don't, but you'd like to build your
+ system such that extra inherited templates can be inserted in
+ the middle of a chain where they would be smoothly integrated.
+ If each template wants to define its layout just within its main
+ body, you can't just call ``self.body()`` to get at the
+ inheriting template's body, since that is only the topmost body.
+ To get at the body of the *next* template, you call upon the
+ namespace ``next``, which is the namespace of the template
+ **immediately following** the current template.
+
+ Let's change the line in ``base.html`` which calls upon
+ ``self.body()`` to instead call upon ``next.body()``:
+
+ .. sourcecode:: mako
+
+     ## base.html
+     <html>
+         <body>
+             <div class="header">
+                 <%block name="header"/>
+             </div>
+
+             ${next.body()}
+         </body>
+     </html>
+
+ Let's also add an intermediate template called ``layout.html``,
+ which inherits from ``base.html``:
+
+ .. sourcecode:: mako
+
+     ## layout.html
+     <%inherit file="base.html"/>
+     <div class="mainlayout">
+         <%block name="toolbar">
+             <ul>
+                 <li>selection 1</li>
+                 <li>selection 2</li>
+                 <li>selection 3</li>
+             </ul>
+         </%block>
+
+         ${next.body()}
+     </div>
+ +And finally change ``index.html`` to inherit from +``layout.html`` instead: + +.. sourcecode:: mako + + ## index.html + <%inherit file="layout.html"/> + + ## .. rest of template + +In this setup, each call to ``next.body()`` will render the body +of the next template in the inheritance chain (which can be +written as ``base.html -> layout.html -> index.html``). Control +is still first passed to the bottommost template ``base.html``, +and ``self`` still references the topmost definition of any +particular def. + +The output we get would be: + +.. sourcecode:: html + + + +
+     <html>
+         <body>
+             <div class="header">
+                 this is some header content
+             </div>
+
+             <div class="mainlayout">
+                 <ul>
+                     <li>selection 1</li>
+                     <li>selection 2</li>
+                     <li>selection 3</li>
+                 </ul>
+
+                 this is the body content.
+             </div>
+         </body>
+     </html>
+
+ So above, we have the ``<html>``, ``<body>`` and
+ ``header``/``footer`` layout of ``base.html``, we have the
+ ``<div>
    `` and ``mainlayout`` section of ``layout.html``, and the +main body of ``index.html`` as well as its overridden ``header`` +def. The ``layout.html`` template is inserted into the middle of +the chain without ``base.html`` having to change anything. +Without the ``next`` namespace, only the main body of +``index.html`` could be used; there would be no way to call +``layout.html``'s body content. + +.. _parent_namespace: + +Using the ``parent`` Namespace to Augment Defs +============================================== + +Lets now look at the other inheritance-specific namespace, the +opposite of ``next`` called ``parent``. ``parent`` is the +namespace of the template **immediately preceding** the current +template. What's useful about this namespace is that +defs or blocks can call upon their overridden versions. +This is not as hard as it sounds and +is very much like using the ``super`` keyword in Python. Lets +modify ``index.html`` to augment the list of selections provided +by the ``toolbar`` function in ``layout.html``: + +.. sourcecode:: mako + + ## index.html + <%inherit file="layout.html"/> + + <%block name="header"> + this is some header content + + + <%block name="toolbar"> + ## call the parent's toolbar first + ${parent.toolbar()} +
+             <li>selection 4</li>
+             <li>selection 5</li>
+         </%block>
+
+     this is the body content.
+
+ Above, we implemented a ``toolbar()`` function, which is meant
+ to override the definition of ``toolbar`` within the inherited
+ template ``layout.html``. However, since we want the content
+ from that of ``layout.html`` as well, we call it via the
+ ``parent`` namespace whenever we want its content, in this case
+ before we add our own selections. So the output for the whole
+ thing is now:
+
+ .. sourcecode:: html
+
+     <html>
+         <body>
+             <div class="header">
+                 this is some header content
+             </div>
+
+             <div class="mainlayout">
+                 <ul>
+                     <li>selection 1</li>
+                     <li>selection 2</li>
+                     <li>selection 3</li>
+                 </ul>
+                 <li>selection 4</li>
+                 <li>selection 5</li>
+
+                 this is the body content.
+             </div>
+         </body>
+     </html>
    + + + + + +and you're now a template inheritance ninja! + +Inheritable Attributes +====================== + +The :attr:`attr <.Namespace.attr>` accessor of the :class:`.Namespace` object +allows access to module level variables declared in a template. By accessing +``self.attr``, you can access regular attributes from the +inheritance chain as declared in ``<%! %>`` sections. Such as: + +.. sourcecode:: mako + + <%! + class_ = "grey" + %> + +
+     <div class="${self.attr.class_}">
+         ${self.body()}
+     </div>
    + +If an inheriting template overrides ``class_`` to be +``"white"``, as in: + +.. sourcecode:: mako + + <%! + class_ = "white" + %> + <%inherit file="parent.html"/> + + This is the body + +you'll get output like: + +.. sourcecode:: html + +
+     <div class="white">
+         This is the body
+     </div>
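+
+ As a hedged sketch of driving the above from Python (the template
+ names and the in-memory ``put_string`` registration below are
+ illustrative; any :class:`.TemplateLookup` that can resolve
+ ``parent.html`` behaves the same way):
+
+ .. sourcecode:: python
+
+     from mako.lookup import TemplateLookup
+
+     lookup = TemplateLookup()
+
+     # the inherited template declares a module-level attribute
+     lookup.put_string("parent.html", """\
+     <%!
+         class_ = "grey"
+     %>
+     <div class="${self.attr.class_}">
+         ${self.body()}
+     </div>
+     """)
+
+     # the inheriting template overrides that attribute
+     lookup.put_string("index.html", """\
+     <%!
+         class_ = "white"
+     %>
+     <%inherit file="parent.html"/>
+     This is the body
+     """)
+
+     # self.attr.class_ resolves against the topmost template, so the
+     # rendered <div> is expected to carry class="white"
+     print(lookup.get_template("index.html").render())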
    + diff --git a/lib3/Mako-0.7.3/doc/_sources/namespaces.txt b/lib3/Mako-0.7.3/doc/_sources/namespaces.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/namespaces.txt @@ -0,0 +1,349 @@ +.. _namespaces_toplevel: + +========== +Namespaces +========== + +Namespaces are used to organize groups of defs into +categories, and also to "import" defs from other files. + +If the file ``components.html`` defines these two defs: + +.. sourcecode:: mako + + ## components.html + <%def name="comp1()"> + this is comp1 + + + <%def name="comp2(x)"> + this is comp2, x is ${x} + + +you can make another file, for example ``index.html``, that +pulls those two defs into a namespace called ``comp``: + +.. sourcecode:: mako + + ## index.html + <%namespace name="comp" file="components.html"/> + + Here's comp1: ${comp.comp1()} + Here's comp2: ${comp.comp2(x=5)} + +The ``comp`` variable above is an instance of +:class:`.Namespace`, a **proxy object** which delivers +method calls to the underlying template callable using the +current context. + +``<%namespace>`` also provides an ``import`` attribute which can +be used to pull the names into the local namespace, removing the +need to call it via the "``.``" operator. When ``import`` is used, the +``name`` attribute is optional. + +.. sourcecode:: mako + + <%namespace file="components.html" import="comp1, comp2"/> + + Heres comp1: ${comp1()} + Heres comp2: ${comp2(x=5)} + +``import`` also supports the "``*``" operator: + +.. sourcecode:: mako + + <%namespace file="components.html" import="*"/> + + Heres comp1: ${comp1()} + Heres comp2: ${comp2(x=5)} + +The names imported by the ``import`` attribute take precedence +over any names that exist within the current context. + +.. note:: In current versions of Mako, usage of ``import='*'`` is + known to decrease performance of the template. This will be + fixed in a future release. + +The ``file`` argument allows expressions -- if looking for +context variables, the ``context`` must be named explicitly: + +.. sourcecode:: mako + + <%namespace name="dyn" file="${context['namespace_name']}"/> + +Ways to Call Namespaces +======================= + +There are essentially four ways to call a function from a +namespace. + +The "expression" format, as described previously. Namespaces are +just Python objects with functions on them, and can be used in +expressions like any other function: + +.. sourcecode:: mako + + ${mynamespace.somefunction('some arg1', 'some arg2', arg3='some arg3', arg4='some arg4')} + +Synonymous with the "expression" format is the "custom tag" +format, when a "closed" tag is used. This format, introduced in +Mako 0.2.3, allows the usage of a "custom" Mako tag, with the +function arguments passed in using named attributes: + +.. sourcecode:: mako + + <%mynamespace:somefunction arg1="some arg1" arg2="some arg2" arg3="some arg3" arg4="some arg4"/> + +When using tags, the values of the arguments are taken as +literal strings by default. To embed Python expressions as +arguments, use the embedded expression format: + +.. sourcecode:: mako + + <%mynamespace:somefunction arg1="${someobject.format()}" arg2="${somedef(5, 12)}"/> + +The "custom tag" format is intended mainly for namespace +functions which recognize body content, which in Mako is known +as a "def with embedded content": + +.. sourcecode:: mako + + <%mynamespace:somefunction arg1="some argument" args="x, y"> + Some record: ${x}, ${y} + + +The "classic" way to call defs with embedded content is the ``<%call>`` tag: + +.. 
sourcecode:: mako + + <%call expr="mynamespace.somefunction(arg1='some argument')" args="x, y"> + Some record: ${x}, ${y} + + +For information on how to construct defs that embed content from +the caller, see :ref:`defs_with_content`. + +.. _namespaces_python_modules: + +Namespaces from Regular Python Modules +====================================== + +Namespaces can also import regular Python functions from +modules. These callables need to take at least one argument, +``context``, an instance of :class:`.Context`. A module file +``some/module.py`` might contain the callable: + +.. sourcecode:: python + + def my_tag(context): + context.write("hello world") + return '' + +A template can use this module via: + +.. sourcecode:: mako + + <%namespace name="hw" module="some.module"/> + + ${hw.my_tag()} + +Note that the ``context`` argument is not needed in the call; +the :class:`.Namespace` tag creates a locally-scoped callable which +takes care of it. The ``return ''`` is so that the def does not +dump a ``None`` into the output stream -- the return value of any +def is rendered after the def completes, in addition to whatever +was passed to :meth:`.Context.write` within its body. + +If your def is to be called in an "embedded content" context, +that is as described in :ref:`defs_with_content`, you should use +the :func:`.supports_caller` decorator, which will ensure that Mako +will ensure the correct "caller" variable is available when your +def is called, supporting embedded content: + +.. sourcecode:: python + + from mako.runtime import supports_caller + + @supports_caller + def my_tag(context): + context.write("
    ") + context['caller'].body() + context.write("
    ") + return '' + +Capturing of output is available as well, using the +outside-of-templates version of the :func:`.capture` function, +which accepts the "context" as its first argument: + +.. sourcecode:: python + + from mako.runtime import supports_caller, capture + + @supports_caller + def my_tag(context): + return "
    %s
    " % \ + capture(context, context['caller'].body, x="foo", y="bar") + +Declaring Defs in Namespaces +============================ + +The ``<%namespace>`` tag supports the definition of ``<%def>``\ s +directly inside the tag. These defs become part of the namespace +like any other function, and will override the definitions +pulled in from a remote template or module: + +.. sourcecode:: mako + + ## define a namespace + <%namespace name="stuff"> + <%def name="comp1()"> + comp1 + + + + ## then call it + ${stuff.comp1()} + +.. _namespaces_body: + +The ``body()`` Method +===================== + +Every namespace that is generated from a template contains a +method called ``body()``. This method corresponds to the main +body of the template, and plays its most important roles when +using inheritance relationships as well as +def-calls-with-content. + +Since the ``body()`` method is available from a namespace just +like all the other defs defined in a template, what happens if +you send arguments to it? By default, the ``body()`` method +accepts no positional arguments, and for usefulness in +inheritance scenarios will by default dump all keyword arguments +into a dictionary called ``pageargs``. But if you actually want +to get at the keyword arguments, Mako recommends you define your +own argument signature explicitly. You do this via using the +``<%page>`` tag: + +.. sourcecode:: mako + + <%page args="x, y, someval=8, scope='foo', **kwargs"/> + +A template which defines the above signature requires that the +variables ``x`` and ``y`` are defined, defines default values +for ``someval`` and ``scope``, and sets up ``**kwargs`` to +receive all other keyword arguments. If ``**kwargs`` or similar +is not present, the argument ``**pageargs`` gets tacked on by +Mako. When the template is called as a top-level template (i.e. +via :meth:`~.Template.render`) or via the ``<%include>`` tag, the +values for these arguments will be pulled from the ``Context``. +In all other cases, i.e. via calling the ``body()`` method, the +arguments are taken as ordinary arguments from the method call. +So above, the body might be called as: + +.. sourcecode:: mako + + ${self.body(5, y=10, someval=15, delta=7)} + +The :class:`.Context` object also supplies a :attr:`~.Context.kwargs` accessor, for +cases when you'd like to pass along whatever is in the context to +a ``body()`` callable: + +.. sourcecode:: mako + + ${next.body(**context.kwargs)} + +The usefulness of calls like the above become more apparent when +one works with inheriting templates. For more information on +this, as well as the meanings of the names ``self`` and +``next``, see :ref:`inheritance_toplevel`. + +.. _namespaces_builtin: + +Built-in Namespaces +=================== + +The namespace is so great that Mako gives your template one (or +two) for free. The names of these namespaces are ``local`` and +``self``. Other built-in namespaces include ``parent`` and +``next``, which are optional and are described in +:ref:`inheritance_toplevel`. + +.. _namespace_local: + +``local`` +--------- + +The ``local`` namespace is basically the namespace for the +currently executing template. This means that all of the top +level defs defined in your template, as well as your template's +``body()`` function, are also available off of the ``local`` +namespace. + +The ``local`` namespace is also where properties like ``uri``, +``filename``, and ``module`` and the ``get_namespace`` method +can be particularly useful. + +.. 
_namespace_self: + +``self`` +-------- + +The ``self`` namespace, in the case of a template that does not +use inheritance, is synonymous with ``local``. If inheritance is +used, then ``self`` references the topmost template in the +inheritance chain, where it is most useful for providing the +ultimate form of various "method" calls which may have been +overridden at various points in an inheritance chain. See +:ref:`inheritance_toplevel`. + +Inheritable Namespaces +====================== + +The ``<%namespace>`` tag includes an optional attribute +``inheritable="True"``, which will cause the namespace to be +attached to the ``self`` namespace. Since ``self`` is globally +available throughout an inheritance chain (described in the next +section), all the templates in an inheritance chain can get at +the namespace imported in a super-template via ``self``. + +.. sourcecode:: mako + + ## base.html + <%namespace name="foo" file="foo.html" inheritable="True"/> + + ${next.body()} + + ## somefile.html + <%inherit file="base.html"/> + + ${self.foo.bar()} + +This allows a super-template to load a whole bunch of namespaces +that its inheriting templates can get to, without them having to +explicitly load those namespaces themselves. + +The ``import="*"`` part of the ``<%namespace>`` tag doesn't yet +interact with the ``inheritable`` flag, so currently you have to +use the explicit namespace name off of ``self``, followed by the +desired function name. But more on this in a future release. + +API Reference +============= + +.. autoclass:: mako.runtime.Namespace + :show-inheritance: + :members: + +.. autoclass:: mako.runtime.TemplateNamespace + :show-inheritance: + :members: + +.. autoclass:: mako.runtime.ModuleNamespace + :show-inheritance: + :members: + +.. autofunction:: mako.runtime.supports_caller + +.. autofunction:: mako.runtime.capture + diff --git a/lib3/Mako-0.7.3/doc/_sources/runtime.txt b/lib3/Mako-0.7.3/doc/_sources/runtime.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/runtime.txt @@ -0,0 +1,448 @@ +.. _runtime_toplevel: + +============================ +The Mako Runtime Environment +============================ + +This section describes a little bit about the objects and +built-in functions that are available in templates. + +.. _context: + +Context +======= + +The :class:`.Context` is the central object that is created when +a template is first executed, and is responsible for handling +all communication with the outside world. Within the template +environment, it is available via the :ref:`reserved name ` +``context``. The :class:`.Context` includes two +major components, one of which is the output buffer, which is a +file-like object such as Python's ``StringIO`` or similar, and +the other a dictionary of variables that can be freely +referenced within a template; this dictionary is a combination +of the arguments sent to the :meth:`~.Template.render` function and +some built-in variables provided by Mako's runtime environment. + +The Buffer +---------- + +The buffer is stored within the :class:`.Context`, and writing +to it is achieved by calling the :meth:`~.Context.write` method +-- in a template this looks like ``context.write('some string')``. +You usually don't need to care about this, as all text within a template, as +well as all expressions provided by ``${}``, automatically send +everything to this method. 
The cases you might want to be aware +of its existence are if you are dealing with various +filtering/buffering scenarios, which are described in +:ref:`filtering_toplevel`, or if you want to programmatically +send content to the output stream, such as within a ``<% %>`` +block. + +.. sourcecode:: mako + + <% + context.write("some programmatic text") + %> + +The actual buffer may or may not be the original buffer sent to +the :class:`.Context` object, as various filtering/caching +scenarios may "push" a new buffer onto the context's underlying +buffer stack. For this reason, just stick with +``context.write()`` and content will always go to the topmost +buffer. + +.. _context_vars: + +Context Variables +----------------- + +When your template is compiled into a Python module, the body +content is enclosed within a Python function called +``render_body``. Other top-level defs defined in the template are +defined within their own function bodies which are named after +the def's name with the prefix ``render_`` (i.e. ``render_mydef``). +One of the first things that happens within these functions is +that all variable names that are referenced within the function +which are not defined in some other way (i.e. such as via +assignment, module level imports, etc.) are pulled from the +:class:`.Context` object's dictionary of variables. This is how you're +able to freely reference variable names in a template which +automatically correspond to what was passed into the current +:class:`.Context`. + +* **What happens if I reference a variable name that is not in + the current context?** - The value you get back is a special + value called ``UNDEFINED``, or if the ``strict_undefined=True`` flag + is used a ``NameError`` is raised. ``UNDEFINED`` is just a simple global + variable with the class :class:`mako.runtime.Undefined`. The + ``UNDEFINED`` object throws an error when you call ``str()`` on + it, which is what happens if you try to use it in an + expression. +* **UNDEFINED makes it hard for me to find what name is missing** - An alternative + is to specify the option ``strict_undefined=True`` + to the :class:`.Template` or :class:`.TemplateLookup`. This will cause + any non-present variables to raise an immediate ``NameError`` + which includes the name of the variable in its message + when :meth:`~.Template.render` is called -- ``UNDEFINED`` is not used. + + .. versionadded:: 0.3.6 + +* **Why not just return None?** Using ``UNDEFINED``, or + raising a ``NameError`` is more + explicit and allows differentiation between a value of ``None`` + that was explicitly passed to the :class:`.Context` and a value that + wasn't present at all. +* **Why raise an exception when you call str() on it ? Why not + just return a blank string?** - Mako tries to stick to the + Python philosophy of "explicit is better than implicit". In + this case, it's decided that the template author should be made + to specifically handle a missing value rather than + experiencing what may be a silent failure. Since ``UNDEFINED`` + is a singleton object just like Python's ``True`` or ``False``, + you can use the ``is`` operator to check for it: + + .. sourcecode:: mako + + % if someval is UNDEFINED: + someval is: no value + % else: + someval is: ${someval} + % endif + +Another facet of the :class:`.Context` is that its dictionary of +variables is **immutable**. Whatever is set when +:meth:`~.Template.render` is called is what stays. 
Of course, since +its Python, you can hack around this and change values in the +context's internal dictionary, but this will probably will not +work as well as you'd think. The reason for this is that Mako in +many cases creates copies of the :class:`.Context` object, which +get sent to various elements of the template and inheriting +templates used in an execution. So changing the value in your +local :class:`.Context` will not necessarily make that value +available in other parts of the template's execution. Examples +of where Mako creates copies of the :class:`.Context` include +within top-level def calls from the main body of the template +(the context is used to propagate locally assigned variables +into the scope of defs; since in the template's body they appear +as inlined functions, Mako tries to make them act that way), and +within an inheritance chain (each template in an inheritance +chain has a different notion of ``parent`` and ``next``, which +are all stored in unique :class:`.Context` instances). + +* **So what if I want to set values that are global to everyone + within a template request?** - All you have to do is provide a + dictionary to your :class:`.Context` when the template first + runs, and everyone can just get/set variables from that. Lets + say its called ``attributes``. + + Running the template looks like: + + .. sourcecode:: python + + output = template.render(attributes={}) + + Within a template, just reference the dictionary: + + .. sourcecode:: mako + + <% + attributes['foo'] = 'bar' + %> + 'foo' attribute is: ${attributes['foo']} + +* **Why can't "attributes" be a built-in feature of the + Context?** - This is an area where Mako is trying to make as + few decisions about your application as it possibly can. + Perhaps you don't want your templates to use this technique of + assigning and sharing data, or perhaps you have a different + notion of the names and kinds of data structures that should + be passed around. Once again Mako would rather ask the user to + be explicit. + +Context Methods and Accessors +----------------------------- + +Significant members of :class:`.Context` include: + +* ``context[key]`` / ``context.get(key, default=None)`` - + dictionary-like accessors for the context. Normally, any + variable you use in your template is automatically pulled from + the context if it isn't defined somewhere already. Use the + dictionary accessor and/or ``get`` method when you want a + variable that *is* already defined somewhere else, such as in + the local arguments sent to a ``%def`` call. If a key is not + present, like a dictionary it raises ``KeyError``. +* ``keys()`` - all the names defined within this context. +* ``kwargs`` - this returns a **copy** of the context's + dictionary of variables. This is useful when you want to + propagate the variables in the current context to a function + as keyword arguments, i.e.: + + .. sourcecode:: mako + + ${next.body(**context.kwargs)} + +* ``write(text)`` - write some text to the current output + stream. +* ``lookup`` - returns the :class:`.TemplateLookup` instance that is + used for all file-lookups within the current execution (even + though individual :class:`.Template` instances can conceivably have + different instances of a :class:`.TemplateLookup`, only the + :class:`.TemplateLookup` of the originally-called :class:`.Template` gets + used in a particular execution). + +.. _loop_context: + +The Loop Context +================ + +Within ``% for`` blocks, the :ref:`reserved name` ``loop`` +is available. 
``loop`` tracks the progress of +the ``for`` loop and makes it easy to use the iteration state to control +template behavior: + +.. sourcecode:: mako + +
+     <ul>
+     % for a in ("one", "two", "three"):
+         <li>Item ${loop.index}: ${a}</li>
+     % endfor
+     </ul>
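+
+ As a quick check of the counter from the Python side, a minimal
+ sketch (assuming Mako is installed and ``enable_loop`` is left at
+ its default) renders a template like the one above directly:
+
+ .. sourcecode:: python
+
+     from mako.template import Template
+
+     t = Template("""\
+     <ul>
+     % for a in ("one", "two", "three"):
+         <li>Item ${loop.index}: ${a}</li>
+     % endfor
+     </ul>
+     """)
+
+     print(t.render())
+     # expected to contain <li>Item 0: one</li> through <li>Item 2: three</li>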
    + +.. versionadded:: 0.7 + +Iterations +---------- + +Regardless of the type of iterable you're looping over, ``loop`` always tracks +the 0-indexed iteration count (available at ``loop.index``), its parity +(through the ``loop.even`` and ``loop.odd`` bools), and ``loop.first``, a bool +indicating whether the loop is on its first iteration. If your iterable +provides a ``__len__`` method, ``loop`` also provides access to +a count of iterations remaining at ``loop.reverse_index`` and ``loop.last``, +a bool indicating whether the loop is on its last iteration; accessing these +without ``__len__`` will raise a ``TypeError``. + +Cycling +------- + +Cycling is available regardless of whether the iterable you're using provides +a ``__len__`` method. Prior to Mako 0.7, you might have generated a simple +zebra striped list using ``enumerate``: + +.. sourcecode:: mako + +
+     <ul>
+     % for i, item in enumerate(('spam', 'ham', 'eggs')):
+         <li class="${'odd' if i % 2 else 'even'}">${item}</li>
+     % endfor
+     </ul>
    + +With ``loop.cycle``, you get the same results with cleaner code and less prep work: + +.. sourcecode:: mako + +
+     <ul>
+     % for item in ('spam', 'ham', 'eggs'):
+         <li class="${loop.cycle('even', 'odd')}">${item}</li>
+     % endfor
+     </ul>
    + +Both approaches produce output like the following: + +.. sourcecode:: html + +
+     <ul>
+         <li class="even">spam</li>
+         <li class="odd">ham</li>
+         <li class="even">eggs</li>
+     </ul>
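+
+ As a sanity check, both templates can be rendered side by side from
+ Python; this is a hedged sketch (the template strings mirror the two
+ examples above as reconstructed here) and the two renders are
+ expected to be identical:
+
+ .. sourcecode:: python
+
+     from mako.template import Template
+
+     enumerated = Template("""\
+     <ul>
+     % for i, item in enumerate(('spam', 'ham', 'eggs')):
+         <li class="${'odd' if i % 2 else 'even'}">${item}</li>
+     % endfor
+     </ul>
+     """)
+
+     cycled = Template("""\
+     <ul>
+     % for item in ('spam', 'ham', 'eggs'):
+         <li class="${loop.cycle('even', 'odd')}">${item}</li>
+     % endfor
+     </ul>
+     """)
+
+     # both produce the same zebra-striped markup
+     assert enumerated.render() == cycled.render()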
    + +Parent Loops +------------ + +Loop contexts can also be transparently nested, and the Mako runtime will do +the right thing and manage the scope for you. You can access the parent loop +context through ``loop.parent``. + +This allows you to reach all the way back up through the loop stack by +chaining ``parent`` attribute accesses, i.e. ``loop.parent.parent....`` as +long as the stack depth isn't exceeded. For example, you can use the parent +loop to make a checkered table: + +.. sourcecode:: mako + +
+     <table>
+     % for consonant in 'pbj':
+         <tr>
+         % for vowel in 'iou':
+             <td class="${'black' if (loop.parent.even == loop.even) else 'red'}">
+                 ${consonant + vowel}t
+             </td>
+         % endfor
+         </tr>
+     % endfor
+     </table>
+ +.. sourcecode:: html + + + + + + + + + + + + + + + + + +
+     <table>
+         <tr>
+             <td class="black">pit</td>
+             <td class="red">pot</td>
+             <td class="black">put</td>
+         </tr>
+         <tr>
+             <td class="red">bit</td>
+             <td class="black">bot</td>
+             <td class="red">but</td>
+         </tr>
+         <tr>
+             <td class="black">jit</td>
+             <td class="red">jot</td>
+             <td class="black">jut</td>
+         </tr>
+     </table>
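+
+ For a more minimal view of ``loop.parent`` (a sketch, not taken from
+ the original documentation), the nested loops below simply print the
+ parent and child indexes together:
+
+ .. sourcecode:: python
+
+     from mako.template import Template
+
+     t = Template("""\
+     % for row in ('a', 'b'):
+     % for col in ('x', 'y'):
+     ${loop.parent.index}.${loop.index} -> ${row}${col}
+     % endfor
+     % endfor
+     """)
+
+     print(t.render())
+     # expected lines: 0.0 -> ax, 0.1 -> ay, 1.0 -> bx, 1.1 -> by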
+ +.. _migrating_loop: + +Migrating Legacy Templates that Use the Word "loop" +--------------------------------------------------- + +.. versionchanged:: 0.7 + The ``loop`` name is now :ref:`reserved ` in Mako, + which means a template that refers to a variable named ``loop`` + won't function correctly when used in Mako 0.7. + +To ease the transition for such systems, the feature can be disabled across the board for +all templates, then re-enabled on a per-template basis for those templates which wish +to make use of the new system. + +First, the ``enable_loop=False`` flag is passed to either the :class:`.TemplateLookup` +or :class:`.Template` object in use: + +.. sourcecode:: python + + lookup = TemplateLookup(directories=['/docs'], enable_loop=False) + +or: + +.. sourcecode:: python + + template = Template("some template", enable_loop=False) + +An individual template can make usage of the feature when ``enable_loop`` is set to +``False`` by switching it back on within the ``<%page>`` tag: + +.. sourcecode:: mako + + <%page enable_loop="True"/> + + % for i in collection: + ${i} ${loop.index} + % endfor + +Using the above scheme, it's safe to pass the name ``loop`` to the :meth:`.Template.render` +method as well as to freely make usage of a variable named ``loop`` within a template, provided +the ``<%page>`` tag doesn't override it. New templates that want to use the ``loop`` context +can then set up ``<%page enable_loop="True"/>`` to use the new feature without affecting +old templates. + +All the Built-in Names +====================== + +A one-stop shop for all the names Mako defines. Most of these +names are instances of :class:`.Namespace`, which are described +in the next section, :ref:`namespaces_toplevel`. Also, most of +these names other than ``context``, ``UNDEFINED``, and ``loop`` are +also present *within* the :class:`.Context` itself. The names +``context``, ``loop`` and ``UNDEFINED`` themselves can't be passed +to the context and can't be substituted -- see the section :ref:`reserved_names`. + +* ``context`` - this is the :class:`.Context` object, introduced + at :ref:`context`. +* ``local`` - the namespace of the current template, described + in :ref:`namespaces_builtin`. +* ``self`` - the namespace of the topmost template in an + inheritance chain (if any, otherwise the same as ``local``), + mostly described in :ref:`inheritance_toplevel`. +* ``parent`` - the namespace of the parent template in an + inheritance chain (otherwise undefined); see + :ref:`inheritance_toplevel`. +* ``next`` - the namespace of the next template in an + inheritance chain (otherwise undefined); see + :ref:`inheritance_toplevel`. +* ``caller`` - a "mini" namespace created when using the + ``<%call>`` tag to define a "def call with content"; described + in :ref:`defs_with_content`. +* ``loop`` - this provides access to :class:`.LoopContext` objects when + they are requested within ``% for`` loops, introduced at :ref:`loop_context`. +* ``capture`` - a function that calls a given def and captures + its resulting content into a string, which is returned. Usage + is described in :ref:`filtering_toplevel`. +* ``UNDEFINED`` - a global singleton that is applied to all + otherwise uninitialized template variables that were not + located within the :class:`.Context` when rendering began, + unless the :class:`.Template` flag ``strict_undefined`` + is set to ``True``. ``UNDEFINED`` is + an instance of :class:`.Undefined`, and raises an + exception when its ``__str__()`` method is called. 
+* ``pageargs`` - this is a dictionary which is present in a + template which does not define any ``**kwargs`` section in its + ``<%page>`` tag. All keyword arguments sent to the ``body()`` + function of a template (when used via namespaces) go here by + default unless otherwise defined as a page argument. If this + makes no sense, it shouldn't; read the section + :ref:`namespaces_body`. + +.. _reserved_names: + +Reserved Names +-------------- + +Mako has a few names that are considered to be "reserved" and can't be used +as variable names. + +.. versionchanged:: 0.7 + Mako raises an error if these words are found passed to the template + as context arguments, whereas in previous versions they'd be silently + ignored or lead to other error messages. + +* ``context`` - see :ref:`context`. +* ``UNDEFINED`` - see :ref:`context_vars`. +* ``loop`` - see :ref:`loop_context`. Note this can be disabled for legacy templates + via the ``enable_loop=False`` argument; see :ref:`migrating_loop`. + +API Reference +============= + +.. autoclass:: mako.runtime.Context + :show-inheritance: + :members: + +.. autoclass:: mako.runtime.LoopContext + :show-inheritance: + :members: + +.. autoclass:: mako.runtime.Undefined + :show-inheritance: + diff --git a/lib3/Mako-0.7.3/doc/_sources/syntax.txt b/lib3/Mako-0.7.3/doc/_sources/syntax.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/syntax.txt @@ -0,0 +1,469 @@ +.. _syntax_toplevel: + +====== +Syntax +====== + +A Mako template is parsed from a text stream containing any kind +of content, XML, HTML, email text, etc. The template can further +contain Mako-specific directives which represent variable and/or +expression substitutions, control structures (i.e. conditionals +and loops), server-side comments, full blocks of Python code, as +well as various tags that offer additional functionality. All of +these constructs compile into real Python code. This means that +you can leverage the full power of Python in almost every aspect +of a Mako template. + +Expression Substitution +======================= + +The simplest expression is just a variable substitution. The +syntax for this is the ``${}`` construct, which is inspired by +Perl, Genshi, JSP EL, and others: + +.. sourcecode:: mako + + this is x: ${x} + +Above, the string representation of ``x`` is applied to the +template's output stream. If you're wondering where ``x`` comes +from, it's usually from the :class:`.Context` supplied to the +template's rendering function. If ``x`` was not supplied to the +template and was not otherwise assigned locally, it evaluates to +a special value ``UNDEFINED``. More on that later. + +The contents within the ``${}`` tag are evaluated by Python +directly, so full expressions are OK: + +.. sourcecode:: mako + + pythagorean theorem: ${pow(x,2) + pow(y,2)} + +The results of the expression are evaluated into a string result +in all cases before being rendered to the output stream, such as +the above example where the expression produces a numeric +result. + +Expression Escaping +=================== + +Mako includes a number of built-in escaping mechanisms, +including HTML, URI and XML escaping, as well as a "trim" +function. These escapes can be added to an expression +substitution using the ``|`` operator: + +.. sourcecode:: mako + + ${"this is some text" | u} + +The above expression applies URL escaping to the expression, and +produces ``this+is+some+text``. 
The ``u`` name indicates URL +escaping, whereas ``h`` represents HTML escaping, ``x`` +represents XML escaping, and ``trim`` applies a trim function. + +Read more about built-in filtering functions, including how to +make your own filter functions, in :ref:`filtering_toplevel`. + +Control Structures +================== + +A control structure refers to all those things that control the +flow of a program -- conditionals (i.e. ``if``/``else``), loops (like +``while`` and ``for``), as well as things like ``try``/``except``. In Mako, +control structures are written using the ``%`` marker followed +by a regular Python control expression, and are "closed" by +using another ``%`` marker with the tag "``end``", where +"````" is the keyword of the expression: + +.. sourcecode:: mako + + % if x==5: + this is some output + % endif + +The ``%`` can appear anywhere on the line as long as no text +precedes it; indentation is not significant. The full range of +Python "colon" expressions are allowed here, including +``if``/``elif``/``else``, ``while``, ``for``, and even ``def``, although +Mako has a built-in tag for defs which is more full-featured. + +.. sourcecode:: mako + + % for a in ['one', 'two', 'three', 'four', 'five']: + % if a[0] == 't': + its two or three + % elif a[0] == 'f': + four/five + % else: + one + % endif + % endfor + +The ``%`` sign can also be "escaped", if you actually want to +emit a percent sign as the first non whitespace character on a +line, by escaping it as in ``%%``: + +.. sourcecode:: mako + + %% some text + + %% some more text + +The Loop Context +---------------- + +The **loop context** provides additional information about a loop +while inside of a ``% for`` structure: + +.. sourcecode:: mako + +
+     <ul>
+     % for a in ("one", "two", "three"):
+         <li>Item ${loop.index}: ${a}</li>
+     % endfor
+     </ul>
+ +See :ref:`loop_context` for more information on this feature. + +.. versionadded:: 0.7 + +Comments +======== + +Comments come in two varieties. The single line comment uses +``##`` as the first non-space characters on a line: + +.. sourcecode:: mako + + ## this is a comment. + ...text ... + +A multiline version exists using ``<%doc> ...text... ``: + +.. sourcecode:: mako + + <%doc> + these are comments + more comments + + +Newline Filters +=============== + +The backslash ("``\``") character, placed at the end of any +line, will consume the newline character before continuing to +the next line: + +.. sourcecode:: mako + + here is a line that goes onto \ + another line. + +The above text evaluates to: + +.. sourcecode:: text + + here is a line that goes onto another line. + +Python Blocks +============= + +Any arbitrary block of python can be dropped in using the ``<% +%>`` tags: + +.. sourcecode:: mako + + this is a template + <% + x = db.get_resource('foo') + y = [z.element for z in x if x.frobnizzle==5] + %> + % for elem in y: + element: ${elem} + % endfor + +Within ``<% %>``, you're writing a regular block of Python code. +While the code can appear with an arbitrary level of preceding +whitespace, it has to be consistently formatted with itself. +Mako's compiler will adjust the block of Python to be consistent +with the surrounding generated Python code. + +Module-level Blocks +=================== + +A variant on ``<% %>`` is the module-level code block, denoted +by ``<%! %>``. Code within these tags is executed at the module +level of the template, and not within the rendering function of +the template. Therefore, this code does not have access to the +template's context and is only executed when the template is +loaded into memory (which can be only once per application, or +more, depending on the runtime environment). Use the ``<%! %>`` +tags to declare your template's imports, as well as any +pure-Python functions you might want to declare: + +.. sourcecode:: mako + + <%! + import mylib + import re + + def filter(text): + return re.sub(r'^@', '', text) + %> + +Any number of ``<%! %>`` blocks can be declared anywhere in a +template; they will be rendered in the resulting module +in a single contiguous block above all render callables, +in the order in which they appear in the source template. + +Tags +==== + +The rest of what Mako offers takes place in the form of tags. +All tags use the same syntax, which is similar to an XML tag +except that the first character of the tag name is a ``%`` +character. The tag is closed either by a contained slash +character, or an explicit closing tag: + +.. sourcecode:: mako + + <%include file="foo.txt"/> + + <%def name="foo" buffered="True"> + this is a def + + +All tags have a set of attributes which are defined for each +tag. Some of these attributes are required. Also, many +attributes support **evaluation**, meaning you can embed an +expression (using ``${}``) inside the attribute text: + +.. sourcecode:: mako + + <%include file="/foo/bar/${myfile}.txt"/> + +Whether or not an attribute accepts runtime evaluation depends +on the type of tag and how that tag is compiled into the +template. The best way to find out if you can stick an +expression in is to try it! The lexer will tell you if it's not +valid. + +Heres a quick summary of all the tags: + +``<%page>`` +----------- + +This tag defines general characteristics of the template, +including caching arguments, and optional lists of arguments +which the template expects when invoked. + +.. 
sourcecode:: mako + + <%page args="x, y, z='default'"/> + +Or a page tag that defines caching characteristics: + +.. sourcecode:: mako + + <%page cached="True" cache_type="memory"/> + +Currently, only one ``<%page>`` tag gets used per template, the +rest get ignored. While this will be improved in a future +release, for now make sure you have only one ``<%page>`` tag +defined in your template, else you may not get the results you +want. The details of what ``<%page>`` is used for are described +further in :ref:`namespaces_body` as well as :ref:`caching_toplevel`. + +``<%include>`` +-------------- + +A tag that is familiar from other template languages, ``%include`` +is a regular joe that just accepts a file argument and calls in +the rendered result of that file: + +.. sourcecode:: mako + + <%include file="header.html"/> + + hello world + + <%include file="footer.html"/> + +Include also accepts arguments which are available as ``<%page>`` arguments in the receiving template: + +.. sourcecode:: mako + + <%include file="toolbar.html" args="current_section='members', username='ed'"/> + +``<%def>`` +---------- + +The ``%def`` tag defines a Python function which contains a set +of content, that can be called at some other point in the +template. The basic idea is simple: + +.. sourcecode:: mako + + <%def name="myfunc(x)"> + this is myfunc, x is ${x} + + + ${myfunc(7)} + +The ``%def`` tag is a lot more powerful than a plain Python ``def``, as +the Mako compiler provides many extra services with ``%def`` that +you wouldn't normally have, such as the ability to export defs +as template "methods", automatic propagation of the current +:class:`.Context`, buffering/filtering/caching flags, and def calls +with content, which enable packages of defs to be sent as +arguments to other def calls (not as hard as it sounds). Get the +full deal on what ``%def`` can do in :ref:`defs_toplevel`. + +``<%block>`` +------------ + +``%block`` is a tag that is close to a ``%def``, +except executes itself immediately in its base-most scope, +and can also be anonymous (i.e. with no name): + +.. sourcecode:: mako + + <%block filter="h"> + some stuff. + + +Inspired by Jinja2 blocks, named blocks offer a syntactically pleasing way +to do inheritance: + +.. sourcecode:: mako + + + + <%block name="header"> +

+             <h2><%block name="title"/></h2>
+         </%block>
+ + ${self.body()} + + + +Blocks are introduced in :ref:`blocks` and further described in :ref:`inheritance_toplevel`. + +.. versionadded:: 0.4.1 + +``<%namespace>`` +---------------- + +``%namespace`` is Mako's equivalent of Python's ``import`` +statement. It allows access to all the rendering functions and +metadata of other template files, plain Python modules, as well +as locally defined "packages" of functions. + +.. sourcecode:: mako + + <%namespace file="functions.html" import="*"/> + +The underlying object generated by ``%namespace``, an instance of +:class:`.mako.runtime.Namespace`, is a central construct used in +templates to reference template-specific information such as the +current URI, inheritance structures, and other things that are +not as hard as they sound right here. Namespaces are described +in :ref:`namespaces_toplevel`. + +``<%inherit>`` +-------------- + +Inherit allows templates to arrange themselves in **inheritance +chains**. This is a concept familiar in many other template +languages. + +.. sourcecode:: mako + + <%inherit file="base.html"/> + +When using the ``%inherit`` tag, control is passed to the topmost +inherited template first, which then decides how to handle +calling areas of content from its inheriting templates. Mako +offers a lot of flexibility in this area, including dynamic +inheritance, content wrapping, and polymorphic method calls. +Check it out in :ref:`inheritance_toplevel`. + +``<%``\ nsname\ ``:``\ defname\ ``>`` +------------------------------------- + +Any user-defined "tag" can be created against +a namespace by using a tag with a name of the form +``<%:>``. The closed and open formats of such a +tag are equivalent to an inline expression and the ``<%call>`` +tag, respectively. + +.. sourcecode:: mako + + <%mynamespace:somedef param="some value"> + this is the body + + +To create custom tags which accept a body, see +:ref:`defs_with_content`. + +.. versionadded:: 0.2.3 + +``<%call>`` +----------- + +The call tag is the "classic" form of a user-defined tag, and is +roughly equivalent to the ``<%namespacename:defname>`` syntax +described above. This tag is also described in :ref:`defs_with_content`. + +``<%doc>`` +---------- + +The ``%doc`` tag handles multiline comments: + +.. sourcecode:: mako + + <%doc> + these are comments + more comments + + +Also the ``##`` symbol as the first non-space characters on a line can be used for single line comments. + +``<%text>`` +----------- + +This tag suspends the Mako lexer's normal parsing of Mako +template directives, and returns its entire body contents as +plain text. It is used pretty much to write documentation about +Mako: + +.. sourcecode:: mako + + <%text filter="h"> + heres some fake mako ${syntax} + <%def name="x()">${x} + + +Returning Early from a Template +=============================== + +Sometimes you want to stop processing a template or ``<%def>`` +method in the middle and just use the text you've accumulated so +far. You can use a ``return`` statement inside a Python +block to do that. + +.. sourcecode:: mako + + % if not len(records): + No records found. + <% return %> + % endif + +Or perhaps: + +.. sourcecode:: mako + + <% + if not len(records): + return + %> + diff --git a/lib3/Mako-0.7.3/doc/_sources/unicode.txt b/lib3/Mako-0.7.3/doc/_sources/unicode.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/unicode.txt @@ -0,0 +1,345 @@ +.. 
_unicode_toplevel: + +=================== +The Unicode Chapter +=================== + +The Python language supports two ways of representing what we +know as "strings", i.e. series of characters. In Python 2, the +two types are ``string`` and ``unicode``, and in Python 3 they are +``bytes`` and ``string``. A key aspect of the Python 2 ``string`` and +Python 3 ``bytes`` types are that they contain no information +regarding what **encoding** the data is stored in. For this +reason they were commonly referred to as **byte strings** on +Python 2, and Python 3 makes this name more explicit. The +origins of this come from Python's background of being developed +before the Unicode standard was even available, back when +strings were C-style strings and were just that, a series of +bytes. Strings that had only values below 128 just happened to +be **ASCII** strings and were printable on the console, whereas +strings with values above 128 would produce all kinds of +graphical characters and bells. + +Contrast the "byte-string" type with the "unicode/string" type. +Objects of this latter type are created whenever you say something like +``u"hello world"`` (or in Python 3, just ``"hello world"``). In this +case, Python represents each character in the string internally +using multiple bytes per character (something similar to +UTF-16). What's important is that when using the +``unicode``/``string`` type to store strings, Python knows the +data's encoding; it's in its own internal format. Whereas when +using the ``string``/``bytes`` type, it does not. + +When Python 2 attempts to treat a byte-string as a string, which +means it's attempting to compare/parse its characters, to coerce +it into another encoding, or to decode it to a unicode object, +it has to guess what the encoding is. In this case, it will +pretty much always guess the encoding as ``ascii``... and if the +byte-string contains bytes above value 128, you'll get an error. +Python 3 eliminates much of this confusion by just raising an +error unconditionally if a byte-string is used in a +character-aware context. + +There is one operation that Python *can* do with a non-ASCII +byte-string, and it's a great source of confusion: it can dump the +byte-string straight out to a stream or a file, with nary a care +what the encoding is. To Python, this is pretty much like +dumping any other kind of binary data (like an image) to a +stream somewhere. In Python 2, it is common to see programs that +embed all kinds of international characters and encodings into +plain byte-strings (i.e. using ``"hello world"`` style literals) +can fly right through their run, sending reams of strings out to +wherever they are going, and the programmer, seeing the same +output as was expressed in the input, is now under the illusion +that his or her program is Unicode-compliant. In fact, their +program has no unicode awareness whatsoever, and similarly has +no ability to interact with libraries that *are* unicode aware. +Python 3 makes this much less likely by defaulting to unicode as +the storage format for strings. + +The "pass through encoded data" scheme is what template +languages like Cheetah and earlier versions of Myghty do by +default. Mako as of version 0.2 also supports this mode of +operation when using Python 2, using the ``disable_unicode=True`` +flag. However, when using Mako in its default mode of +unicode-aware, it requires explicitness when dealing with +non-ASCII encodings. 
Additionally, if you ever need to handle +unicode strings and other kinds of encoding conversions more +intelligently, the usage of raw byte-strings quickly becomes a +nightmare, since you are sending the Python interpreter +collections of bytes for which it can make no intelligent +decisions with regards to encoding. In Python 3 Mako only allows +usage of native, unicode strings. + +In normal Mako operation, all parsed template constructs and +output streams are handled internally as Python ``unicode`` +objects. It's only at the point of :meth:`~.Template.render` that this unicode +stream may be rendered into whatever the desired output encoding +is. The implication here is that the template developer must +ensure that :ref:`the encoding of all non-ASCII templates is explicit +<set_template_file_encoding>` (still required in Python 3), +that :ref:`all non-ASCII-encoded expressions are in one way or another +converted to unicode <handling_non_ascii_expressions>` +(not much of a burden in Python 3), and that :ref:`the output stream of the +template is handled as a unicode stream being encoded to some +encoding <defining_output_encoding>` (still required in Python 3). + +.. _set_template_file_encoding: + +Specifying the Encoding of a Template File +========================================== + +This is the most basic encoding-related setting, and it is +equivalent to Python's "magic encoding comment", as described in +`pep-0263 `_. Any +template that contains non-ASCII characters requires that this +comment be present so that Mako can decode to unicode (and also +make usage of Python's AST parsing services). Mako's lexer will +use this encoding in order to convert the template source into a +``unicode`` object before continuing its parsing: + +.. sourcecode:: mako + + ## -*- coding: utf-8 -*- + + Alors vous imaginez ma surprise, au lever du jour, quand + une drôle de petite voix m’a réveillé. Elle disait: + « S’il vous plaît… dessine-moi un mouton! » + +For the picky, the regular expression used is derived from that +of the above mentioned pep: + +.. sourcecode:: python + + #.*coding[:=]\s*([-\w.]+).*\n + +The lexer will convert to unicode in all cases, so that if any +characters exist in the template that are outside of the +specified encoding (or the default of ``ascii``), the error will +be immediate. + +As an alternative, the template encoding can be specified +programmatically to either :class:`.Template` or :class:`.TemplateLookup` via +the ``input_encoding`` parameter: + +.. sourcecode:: python + + t = TemplateLookup(directories=['./'], input_encoding='utf-8') + +The above will assume all located templates specify ``utf-8`` +encoding, unless the template itself contains its own magic +encoding comment, which takes precedence. + +.. _handling_non_ascii_expressions: + +Handling Expressions +==================== + +The next area that encoding comes into play is in expression +constructs. By default, Mako's treatment of an expression like +this: + +.. sourcecode:: mako + + ${"hello world"} + +looks something like this: + +.. sourcecode:: python + + context.write(unicode("hello world")) + +In Python 3, it's just: + +.. sourcecode:: python + + context.write(str("hello world")) + +That is, **the output of all expressions is run through the +``unicode`` built-in**. This is the default setting, and can be +modified to expect various encodings.
The ``unicode`` step serves +both the purpose of rendering non-string expressions into +strings (such as integers or objects which contain ``__str__()`` +methods), and to ensure that the final output stream is +constructed as a unicode object. The main implication of this is +that **any raw byte-strings that contain an encoding other than +ASCII must first be decoded to a Python unicode object**. It +means you can't say this in Python 2: + +.. sourcecode:: mako + + ${"voix m’a réveillé."} ## error in Python 2! + +You must instead say this: + +.. sourcecode:: mako + + ${u"voix m’a réveillé."} ## OK ! + +Similarly, if you are reading data from a file that is streaming +bytes, or returning data from some object that is returning a +Python byte-string containing a non-ASCII encoding, you have to +explicitly decode to unicode first, such as: + +.. sourcecode:: mako + + ${call_my_object().decode('utf-8')} + +Note that filehandles acquired by ``open()`` in Python 3 default +to returning "text", that is, the decoding is done for you. See +Python 3's documentation for the ``open()`` built-in for details on +this. + +If you want a certain encoding applied to *all* expressions, +override the ``unicode`` builtin with the ``decode`` built-in at the +:class:`.Template` or :class:`.TemplateLookup` level: + +.. sourcecode:: python + + t = Template(templatetext, default_filters=['decode.utf8']) + +Note that the built-in ``decode`` object is slower than the +``unicode`` function, since unlike ``unicode`` it's not a Python +built-in, and it also checks the type of the incoming data to +determine if string conversion is needed first. + +The ``default_filters`` argument can be used to entirely customize +the filtering process of expressions. This argument is described +in :ref:`filtering_default_filters`. + +.. _defining_output_encoding: + +Defining Output Encoding +======================== + +Now that we have a template which produces a pure unicode output +stream, all the hard work is done. We can take the output and do +anything with it. + +As stated in the :doc:`"Usage" chapter <usage>`, both :class:`.Template` and +:class:`.TemplateLookup` accept ``output_encoding`` and ``encoding_errors`` +parameters which can be used to encode the output in any Python +supported codec: + +.. sourcecode:: python + + from mako.template import Template + from mako.lookup import TemplateLookup + + mylookup = TemplateLookup(directories=['/docs'], output_encoding='utf-8', encoding_errors='replace') + + mytemplate = mylookup.get_template("foo.txt") + print mytemplate.render() + +:meth:`~.Template.render` will return a ``bytes`` object in Python 3 if an output +encoding is specified. By default it performs no encoding and +returns a native string. + +:meth:`~.Template.render_unicode` will return the template output as a Python +``unicode`` object (or ``string`` in Python 3): + +.. sourcecode:: python + + print mytemplate.render_unicode() + +The above method disregards the output encoding keyword argument; +you can encode yourself by saying: + +.. sourcecode:: python + + print mytemplate.render_unicode().encode('utf-8', 'replace') + +Buffer Selection +---------------- + +Mako does play some games with the style of buffering used +internally, to maximize performance. Since the buffer is by far +the most heavily used object in a render operation, it's +important! + +When calling :meth:`~.Template.render` on a template that does not specify any +output encoding (i.e.
it's ``ascii``), Python's ``cStringIO`` module, +which cannot handle encoding of non-ASCII ``unicode`` objects +(even though it can send raw byte-strings through), is used for +buffering. Otherwise, a custom Mako class called +``FastEncodingBuffer`` is used, which essentially is a super +dumbed-down version of ``StringIO`` that gathers all strings into +a list and uses ``u''.join(elements)`` to produce the final output +-- it's markedly faster than ``StringIO``. + +.. _unicode_disabled: + +Saying to Heck with It: Disabling the Usage of Unicode Entirely +=============================================================== + +Some segments of Mako's userbase choose to make no usage of +Unicode whatsoever, and instead would prefer the "pass through" +approach; all string expressions in their templates return +encoded byte-strings, and they would like these strings to pass +right through. The only advantage to this approach is that +templates need not use ``u""`` for literal strings; there's an +arguable speed improvement as well since raw byte-strings +generally perform slightly faster than unicode objects in +Python. For these users, assuming they're sticking with Python +2, they can hit the ``disable_unicode=True`` flag as so: + +.. sourcecode:: python + + # -*- encoding:utf-8 -*- + from mako.template import Template + + t = Template("drôle de petite voix m’a réveillé.", disable_unicode=True, input_encoding='utf-8') + print t.code + +The ``disable_unicode`` mode is strictly a Python 2 thing. It is +not supported at all in Python 3. + +The generated module source code will contain elements like +these: + +.. sourcecode:: python + + # -*- encoding:utf-8 -*- + # ...more generated code ... + + def render_body(context,**pageargs): + context.caller_stack.push_frame() + try: + __M_locals = dict(pageargs=pageargs) + # SOURCE LINE 1 + context.write('dr\xc3\xb4le de petite voix m\xe2\x80\x99a r\xc3\xa9veill\xc3\xa9.') + return '' + finally: + context.caller_stack.pop_frame() + +Note that above, the string literal used within :meth:`.Context.write` +is a regular byte-string. + +When ``disable_unicode=True`` is turned on, the ``default_filters`` +argument which normally defaults to ``["unicode"]`` now defaults +to ``["str"]`` instead. Setting ``default_filters`` to the empty list +``[]`` can remove the overhead of the ``str`` call. Also, in this +mode you **cannot** safely call :meth:`~.Template.render_unicode` -- you'll get +unicode/decode errors. + +The ``h`` filter (HTML escape) uses a less performant pure Python +escape function in non-unicode mode. This is because +MarkupSafe only supports Python unicode objects for non-ASCII +strings. + +.. versionchanged:: 0.3.4 + In prior versions, it used ``cgi.escape()``, which has been replaced + with a function that also escapes single quotes. + +Rules for using ``disable_unicode=True`` +---------------------------------------- + +* Don't use this mode unless you really, really want to and you + absolutely understand what you're doing. +* Don't use this option just because you don't want to learn to + use Unicode properly; we aren't supporting user issues in this + mode of operation. We will however offer generous help for the + vast majority of users who stick to the Unicode program. +* Python 3 is unicode by default, and the flag is not available + when running on Python 3.
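+
+For users who stay on the default Unicode-aware path, the equivalent
+setup needs no special flags at all: declare the template's encoding
+via ``input_encoding`` (or a magic encoding comment) and, if encoded
+output is desired, an ``output_encoding``. The following is a minimal
+sketch using only the parameters described earlier in this chapter;
+the file path and encodings are illustrative placeholders:
+
+.. sourcecode:: python
+
+    from mako.template import Template
+
+    # Hypothetical template path; the source is decoded per
+    # input_encoding (or the template's own magic encoding comment).
+    t = Template(filename='/docs/hello.txt',
+                 input_encoding='utf-8',
+                 output_encoding='utf-8',
+                 encoding_errors='replace')
+
+    encoded = t.render()           # byte-string, encoded as utf-8
+    text = t.render_unicode()      # unicode object, no encoding applied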
+ diff --git a/lib3/Mako-0.7.3/doc/_sources/usage.txt b/lib3/Mako-0.7.3/doc/_sources/usage.txt new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_sources/usage.txt @@ -0,0 +1,520 @@ +.. _usage_toplevel: + +===== +Usage +===== + +Basic Usage +=========== + +This section describes the Python API for Mako templates. If you +are using Mako within a web framework such as Pylons, the work +of integrating Mako's API is already done for you, in which case +you can skip to the next section, :ref:`syntax_toplevel`. + +The most basic way to create a template and render it is through +the :class:`.Template` class: + +.. sourcecode:: python + + from mako.template import Template + + mytemplate = Template("hello world!") + print mytemplate.render() + +Above, the text argument to :class:`.Template` is **compiled** into a +Python module representation. This module contains a function +called ``render_body()``, which produces the output of the +template. When ``mytemplate.render()`` is called, Mako sets up a +runtime environment for the template and calls the +``render_body()`` function, capturing the output into a buffer and +returning its string contents. + + +The code inside the ``render_body()`` function has access to a +namespace of variables. You can specify these variables by +sending them as additional keyword arguments to the :meth:`~.Template.render` +method: + +.. sourcecode:: python + + from mako.template import Template + + mytemplate = Template("hello, ${name}!") + print mytemplate.render(name="jack") + +The :meth:`~.Template.render` method calls upon Mako to create a +:class:`.Context` object, which stores all the variable names accessible +to the template and also stores a buffer used to capture output. +You can create this :class:`.Context` yourself and have the template +render with it, using the :meth:`~.Template.render_context` method: + +.. sourcecode:: python + + from mako.template import Template + from mako.runtime import Context + from StringIO import StringIO + + mytemplate = Template("hello, ${name}!") + buf = StringIO() + ctx = Context(buf, name="jack") + mytemplate.render_context(ctx) + print buf.getvalue() + +Using File-Based Templates +========================== + +A :class:`.Template` can also load its template source code from a file, +using the ``filename`` keyword argument: + +.. sourcecode:: python + + from mako.template import Template + + mytemplate = Template(filename='/docs/mytmpl.txt') + print mytemplate.render() + +For improved performance, a :class:`.Template` which is loaded from a +file can also cache the source code to its generated module on +the filesystem as a regular Python module file (i.e. a ``.py`` +file). To do this, just add the ``module_directory`` argument to +the template: + +.. sourcecode:: python + + from mako.template import Template + + mytemplate = Template(filename='/docs/mytmpl.txt', module_directory='/tmp/mako_modules') + print mytemplate.render() + +When the above code is rendered, a file +``/tmp/mako_modules/docs/mytmpl.txt.py`` is created containing the +source code for the module. The next time a :class:`.Template` with the +same arguments is created, this module file will be +automatically re-used. + +.. _usage_templatelookup: + +Using ``TemplateLookup`` +======================== + +All of the examples thus far have dealt with the usage of a +single :class:`.Template` object. If the code within those templates +tries to locate another template resource, it will need some way +to find them, using simple URI strings. 
For this need, the +resolution of other templates from within a template is +accomplished by the :class:`.TemplateLookup` class. This class is +constructed given a list of directories in which to search for +templates, as well as keyword arguments that will be passed to +the :class:`.Template` objects it creates: + +.. sourcecode:: python + + from mako.template import Template + from mako.lookup import TemplateLookup + + mylookup = TemplateLookup(directories=['/docs']) + mytemplate = Template("""<%include file="header.txt"/> hello world!""", lookup=mylookup) + +Above, we created a textual template which includes the file +``"header.txt"``. In order for it to have somewhere to look for +``"header.txt"``, we passed a :class:`.TemplateLookup` object to it, which +will search in the directory ``/docs`` for the file ``"header.txt"``. + +Usually, an application will store most or all of its templates +as text files on the filesystem. So far, all of our examples +have been a little bit contrived in order to illustrate the +basic concepts. But a real application would get most or all of +its templates directly from the :class:`.TemplateLookup`, using the +aptly named :meth:`~.TemplateLookup.get_template` method, which accepts the URI of the +desired template: + +.. sourcecode:: python + + from mako.template import Template + from mako.lookup import TemplateLookup + + mylookup = TemplateLookup(directories=['/docs'], module_directory='/tmp/mako_modules') + + def serve_template(templatename, **kwargs): + mytemplate = mylookup.get_template(templatename) + print mytemplate.render(**kwargs) + +In the example above, we create a :class:`.TemplateLookup` which will +look for templates in the ``/docs`` directory, and will store +generated module files in the ``/tmp/mako_modules`` directory. The +lookup locates templates by appending the given URI to each of +its search directories; so if you gave it a URI of +``/etc/beans/info.txt``, it would search for the file +``/docs/etc/beans/info.txt``, else raise a :class:`.TopLevelNotFound` +exception, which is a custom Mako exception. + +When the lookup locates templates, it will also assign a ``uri`` +property to the :class:`.Template` which is the URI passed to the +:meth:`~.TemplateLookup.get_template()` call. :class:`.Template` uses this URI to calculate the +name of its module file. So in the above example, a +``templatename`` argument of ``/etc/beans/info.txt`` will create a +module file ``/tmp/mako_modules/etc/beans/info.txt.py``. + +Setting the Collection Size +--------------------------- + +The :class:`.TemplateLookup` also serves the important need of caching a +fixed set of templates in memory at a given time, so that +successive URI lookups do not result in full template +compilations and/or module reloads on each request. By default, +the :class:`.TemplateLookup` size is unbounded. You can specify a fixed +size using the ``collection_size`` argument: + +.. sourcecode:: python + + mylookup = TemplateLookup(directories=['/docs'], + module_directory='/tmp/mako_modules', collection_size=500) + +The above lookup will continue to load templates into memory +until it reaches a count of around 500. At that point, it will +clean out a certain percentage of templates using a least +recently used scheme. + +Setting Filesystem Checks +------------------------- + +Another important flag on :class:`.TemplateLookup` is +``filesystem_checks``. 
This defaults to ``True``, and says that each +time a template is returned by the :meth:`~.TemplateLookup.get_template()` method, the +revision time of the original template file is checked against +the last time the template was loaded, and if the file is newer +will reload its contents and recompile the template. On a +production system, setting ``filesystem_checks`` to ``False`` can +afford a small to moderate performance increase (depending on +the type of filesystem used). + +.. _usage_unicode: + +Using Unicode and Encoding +========================== + +Both :class:`.Template` and :class:`.TemplateLookup` accept ``output_encoding`` +and ``encoding_errors`` parameters which can be used to encode the +output in any Python supported codec: + +.. sourcecode:: python + + from mako.template import Template + from mako.lookup import TemplateLookup + + mylookup = TemplateLookup(directories=['/docs'], output_encoding='utf-8', encoding_errors='replace') + + mytemplate = mylookup.get_template("foo.txt") + print mytemplate.render() + +When using Python 3, the :meth:`~.Template.render` method will return a ``bytes`` +object, **if** ``output_encoding`` is set. Otherwise it returns a +``string``. + +Additionally, the :meth:`~.Template.render_unicode()` method exists which will +return the template output as a Python ``unicode`` object, or in +Python 3 a ``string``: + +.. sourcecode:: python + + print mytemplate.render_unicode() + +The above method disregards the output encoding keyword +argument; you can encode yourself by saying: + +.. sourcecode:: python + + print mytemplate.render_unicode().encode('utf-8', 'replace') + +Note that Mako's ability to return data in any encoding and/or +``unicode`` implies that the underlying output stream of the +template is a Python unicode object. This behavior is described +fully in :ref:`unicode_toplevel`. + +.. _handling_exceptions: + +Handling Exceptions +=================== + +Template exceptions can occur in two distinct places. One is +when you **lookup, parse and compile** the template, the other +is when you **run** the template. Within the running of a +template, exceptions are thrown normally from whatever Python +code originated the issue. Mako has its own set of exception +classes which mostly apply to the lookup and lexer/compiler +stages of template construction. Mako provides some library +routines that can be used to help provide Mako-specific +information about any exception's stack trace, as well as +formatting the exception within textual or HTML format. In all +cases, the main value of these handlers is that of converting +Python filenames, line numbers, and code samples into Mako +template filenames, line numbers, and code samples. All lines +within a stack trace which correspond to a Mako template module +will be converted to be against the originating template file. + +To format exception traces, the :func:`.text_error_template` and +:func:`.html_error_template` functions are provided. They make usage of +``sys.exc_info()`` to get at the most recently thrown exception. +Usage of these handlers usually looks like: + +.. sourcecode:: python + + from mako import exceptions + + try: + template = lookup.get_template(uri) + print template.render() + except: + print exceptions.text_error_template().render() + +Or for the HTML render function: + +.. 
sourcecode:: python + + from mako import exceptions + + try: + template = lookup.get_template(uri) + print template.render() + except: + print exceptions.html_error_template().render() + +The :func:`.html_error_template` template accepts two options: +specifying ``full=False`` causes only a section of an HTML +document to be rendered. Specifying ``css=False`` will disable the +default stylesheet from being rendered. + +E.g.: + +.. sourcecode:: python + + print exceptions.html_error_template().render(full=False) + +The HTML render function is also available built-in to +:class:`.Template` using the ``format_exceptions`` flag. In this case, any +exceptions raised within the **render** stage of the template +will result in the output being substituted with the output of +:func:`.html_error_template`: + +.. sourcecode:: python + + template = Template(filename="/foo/bar", format_exceptions=True) + print template.render() + +Note that the compile stage of the above template occurs when +you construct the :class:`.Template` itself, and no output stream is +defined. Therefore exceptions which occur within the +lookup/parse/compile stage will not be handled and will +propagate normally. While the pre-render traceback usually will +not include any Mako-specific lines anyway, it will mean that +exceptions which occur previous to rendering and those which +occur within rendering will be handled differently... so the +``try``/``except`` patterns described previously are probably of more +general use. + +The underlying object used by the error template functions is +the :class:`.RichTraceback` object. This object can also be used +directly to provide custom error views. Here's an example usage +which describes its general API: + +.. sourcecode:: python + + from mako.exceptions import RichTraceback + + try: + template = lookup.get_template(uri) + print template.render() + except: + traceback = RichTraceback() + for (filename, lineno, function, line) in traceback.traceback: + print "File %s, line %s, in %s" % (filename, lineno, function) + print line, "\n" + print "%s: %s" % (str(traceback.error.__class__.__name__), traceback.error) + +Common Framework Integrations +============================= + +The Mako distribution includes a little bit of helper code for +the purpose of using Mako in some popular web framework +scenarios. This is a brief description of what's included. + +WSGI +---- + +A sample WSGI application is included in the distribution in the +file ``examples/wsgi/run_wsgi.py``. This runner is set up to pull +files from a `templates` as well as an `htdocs` directory and +includes a rudimental two-file layout. The WSGI runner acts as a +fully functional standalone web server, using ``wsgiutils`` to run +itself, and propagates GET and POST arguments from the request +into the :class:`.Context`, can serve images, CSS files and other kinds +of files, and also displays errors using Mako's included +exception-handling utilities. + +Pygments +-------- + +A `Pygments `_-compatible syntax +highlighting module is included under :mod:`mako.ext.pygmentplugin`. +This module is used in the generation of Mako documentation and +also contains various `setuptools` entry points under the heading +``pygments.lexers``, including ``mako``, ``html+mako``, ``xml+mako`` +(see the ``setup.py`` file for all the entry points). + +Babel +----- + +Mako provides support for extracting `gettext` messages from +templates via a `Babel`_ extractor +entry point under ``mako.ext.babelplugin``. 
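+
+Before moving on to the details of message extraction, here is a short,
+hypothetical sketch of the Pygments plugin mentioned above. It assumes
+the plugin exposes a ``MakoLexer`` class importable from
+``mako.ext.pygmentplugin`` (as the ``setup.py`` entry points suggest);
+treat the import path and the filename as assumptions rather than a
+documented API:
+
+.. sourcecode:: python
+
+    # Hypothetical usage of the Pygments syntax-highlighting plugin.
+    from pygments import highlight
+    from pygments.formatters import HtmlFormatter
+    from mako.ext.pygmentplugin import MakoLexer   # assumed import path
+
+    source = open('mytemplate.html').read()        # any Mako template
+    print(highlight(source, MakoLexer(), HtmlFormatter()))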
+ +`Gettext` messages are extracted from all Python code sections, +including those of control lines and expressions embedded +in tags. + +`Translator +comments `_ +may also be extracted from Mako templates when a comment tag is +specified to `Babel`_ (such as with +the ``-c`` option). + +For example, a project ``"myproj"`` contains the following Mako +template at ``myproj/myproj/templates/name.html``: + +.. sourcecode:: mako + +
+ Name: + ## TRANSLATORS: This is a proper name. See the gettext + ## manual, section Names. + ${_('Francois Pinard')} +
+ +To extract gettext messages from this template the project needs +a Mako section in its `Babel Extraction Method Mapping +file `_ +(typically located at ``myproj/babel.cfg``): + +.. sourcecode:: cfg + + # Extraction from Python source files + + [python: myproj/**.py] + + # Extraction from Mako templates + + [mako: myproj/templates/**.html] + input_encoding = utf-8 + +The Mako extractor supports an optional ``input_encoding`` +parameter specifying the encoding of the templates (identical to +:class:`.Template`/:class:`.TemplateLookup`'s ``input_encoding`` parameter). + +Invoking `Babel`_'s extractor at the +command line in the project's root directory: + +.. sourcecode:: sh + + myproj$ pybabel extract -F babel.cfg -c "TRANSLATORS:" . + +will output a `gettext` catalog to `stdout` including the following: + +.. sourcecode:: pot + + #. TRANSLATORS: This is a proper name. See the gettext + #. manual, section Names. + #: myproj/templates/name.html:5 + msgid "Francois Pinard" + msgstr "" + +This is only a basic example: +`Babel`_ can be invoked from ``setup.py`` +and its command line options specified in the accompanying +``setup.cfg`` via `Babel Distutils/Setuptools +Integration `_. + +Comments must immediately precede a `gettext` message to be +extracted. In the following case the ``TRANSLATORS:`` comment would +not have been extracted: + +.. sourcecode:: mako + +
+ ## TRANSLATORS: This is a proper name. See the gettext + ## manual, section Names. + Name: ${_('Francois Pinard')} +
+ +See the `Babel User +Guide `_ +for more information. + +.. _babel: http://babel.edgewall.org/ + + +API Reference +============= + +.. autoclass:: mako.template.Template + :show-inheritance: + :members: + +.. autoclass:: mako.template.DefTemplate + :show-inheritance: + :members: + +.. autoclass:: mako.lookup.TemplateCollection + :show-inheritance: + :members: + +.. autoclass:: mako.lookup.TemplateLookup + :show-inheritance: + :members: + +.. autoclass:: mako.exceptions.RichTraceback + :show-inheritance: + + .. py:attribute:: error + + the exception instance. + + .. py:attribute:: message + + the exception error message as unicode. + + .. py:attribute:: source + + source code of the file where the error occurred. + If the error occurred within a compiled template, + this is the template source. + + .. py:attribute:: lineno + + line number where the error occurred. If the error + occurred within a compiled template, the line number + is adjusted to that of the template source. + + .. py:attribute:: records + + a list of 8-tuples containing the original + python traceback elements, plus the + filename, line number, source line, and full template source + for the traceline mapped back to its originating source + template, if any for that traceline (else the fields are ``None``). + + .. py:attribute:: reverse_records + + the list of records in reverse + traceback -- a list of 4-tuples, in the same format as a regular + python traceback, with template-corresponding + traceback records replacing the originals. + + .. py:attribute:: reverse_traceback + + the traceback list in reverse. + +.. autofunction:: mako.exceptions.html_error_template + +.. autofunction:: mako.exceptions.text_error_template + diff --git a/lib3/Mako-0.7.3/doc/_static/basic.css b/lib3/Mako-0.7.3/doc/_static/basic.css new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_static/basic.css @@ -0,0 +1,540 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + width: 170px; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + width: 30px; +} + +img { + border: 0; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- general body styles --------------------------------------------------- */ + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.field-list ul { + padding-left: 1em; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, 
.figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px 7px 0 7px; + background-color: #ffe; + width: 40%; + float: right; +} + +p.sidebar-title { + font-weight: bold; +} + +/* -- topics ---------------------------------------------------------------- */ + +div.topic { + border: 1px solid #ccc; + padding: 7px 7px 0 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +div.admonition dl { + margin-bottom: 0; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + border: 0; + border-collapse: collapse; +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.field-list td, table.field-list th { + border: 0 !important; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +dl { + margin-bottom: 15px; +} + +dd p { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dt:target, .highlighted { + background-color: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.refcount { + color: #060; +} + +.optional { + font-size: 1.3em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +td.linenos pre { + padding: 5px 0px; + border: 0; + 
background-color: transparent; + color: #aaa; +} + +table.highlighttable { + margin-left: 0.5em; +} + +table.highlighttable td { + padding: 0 0.5em 0 0.5em; +} + +tt.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +tt.descclassname { + background-color: transparent; +} + +tt.xref, a tt { + background-color: transparent; + font-weight: bold; +} + +h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +/* -- printout stylesheet --------------------------------------------------- */ + + at media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/lib3/Mako-0.7.3/doc/_static/comment-bright.png b/lib3/Mako-0.7.3/doc/_static/comment-bright.png new file mode 100644 index 0000000000000000000000000000000000000000..551517b8c83b76f734ff791f847829a760ad1903 GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/comment-close.png b/lib3/Mako-0.7.3/doc/_static/comment-close.png new file mode 100644 index 0000000000000000000000000000000000000000..09b54be46da3f0d4a5061da289dc91d8a2cdbc9c GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/comment.png b/lib3/Mako-0.7.3/doc/_static/comment.png new file mode 100644 index 0000000000000000000000000000000000000000..92feb52b8824c6b0f59b658b1196c61de9162a95 GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/default.css b/lib3/Mako-0.7.3/doc/_static/default.css new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_static/default.css @@ -0,0 +1,256 @@ +/* + * default.css_t + * ~~~~~~~~~~~~~ + * + * Sphinx stylesheet -- default theme. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + + at import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: sans-serif; + font-size: 100%; + background-color: #11303d; + color: #000; + margin: 0; + padding: 0; +} + +div.document { + background-color: #1c4e63; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 230px; +} + +div.body { + background-color: #ffffff; + color: #000000; + padding: 0 20px 30px 20px; +} + +div.footer { + color: #ffffff; + width: 100%; + padding: 9px 0 9px 0; + text-align: center; + font-size: 75%; +} + +div.footer a { + color: #ffffff; + text-decoration: underline; +} + +div.related { + background-color: #133f52; + line-height: 30px; + color: #ffffff; +} + +div.related a { + color: #ffffff; +} + +div.sphinxsidebar { +} + +div.sphinxsidebar h3 { + font-family: 'Trebuchet MS', sans-serif; + color: #ffffff; + font-size: 1.4em; + font-weight: normal; + margin: 0; + padding: 0; +} + +div.sphinxsidebar h3 a { + color: #ffffff; +} + +div.sphinxsidebar h4 { + font-family: 'Trebuchet MS', sans-serif; + color: #ffffff; + font-size: 1.3em; + font-weight: normal; + margin: 5px 0 0 0; + padding: 0; +} + +div.sphinxsidebar p { + color: #ffffff; +} + +div.sphinxsidebar p.topless { + margin: 5px 10px 10px 10px; +} + +div.sphinxsidebar ul { + margin: 10px; + padding: 0; + color: #ffffff; +} + +div.sphinxsidebar a { + color: #98dbcc; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + + + +/* -- hyperlink styles ------------------------------------------------------ */ + +a { + color: #355f7c; + text-decoration: none; +} + +a:visited { + color: #355f7c; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + + + +/* -- body styles ----------------------------------------------------------- */ + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: 'Trebuchet MS', sans-serif; + background-color: #f2f2f2; + font-weight: normal; + color: #20435c; + border-bottom: 1px solid #ccc; + margin: 20px -20px 10px -20px; + padding: 3px 0 3px 10px; +} + +div.body h1 { margin-top: 0; font-size: 200%; } +div.body h2 { font-size: 160%; } +div.body h3 { font-size: 140%; } +div.body h4 { font-size: 120%; } +div.body h5 { font-size: 110%; } +div.body h6 { font-size: 100%; } + +a.headerlink { + color: #c60f0f; + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; +} + +a.headerlink:hover { + background-color: #c60f0f; + color: white; +} + +div.body p, div.body dd, div.body li { + text-align: justify; + line-height: 130%; +} + +div.admonition p.admonition-title + p { + display: inline; +} + +div.admonition p { + margin-bottom: 5px; +} + +div.admonition pre { + margin-bottom: 5px; +} + +div.admonition ul, div.admonition ol { + margin-bottom: 5px; +} + +div.note { + background-color: #eee; + border: 1px solid #ccc; +} + +div.seealso { + background-color: #ffc; + border: 1px solid #ff6; +} + +div.topic { + background-color: #eee; +} + +div.warning { + background-color: #ffe4e4; + border: 1px solid #f66; +} + +p.admonition-title { + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +pre { + padding: 5px; + background-color: #eeffcc; + color: #333333; + line-height: 120%; + border: 1px solid #ac9; + border-left: none; + border-right: none; +} + +tt { + background-color: #ecf0f3; + padding: 0 1px 0 1px; + font-size: 0.95em; +} + +th { + background-color: #ede; +} + 
+.warning tt { + background: #efc2c2; +} + +.note tt { + background: #d6d6d6; +} + +.viewcode-back { + font-family: sans-serif; +} + +div.viewcode-block:target { + background-color: #f4debf; + border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} \ No newline at end of file diff --git a/lib3/Mako-0.7.3/doc/_static/docs.css b/lib3/Mako-0.7.3/doc/_static/docs.css new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_static/docs.css @@ -0,0 +1,438 @@ +/* global */ + +body { + background-color: #FDFBFC; + margin:38px; + color:#333333; +} + +a { + font-weight:normal; + text-decoration:none; +} + +form { + display:inline; +} + +/* hyperlinks */ + +a:link, a:visited, a:active { + color:#0000FF; +} +a:hover { + color:#700000; + text-decoration:underline; +} + +/* paragraph links after sections. + These aren't visible until hovering + over the tag, then have a + "reverse video" effect over the actual + link + */ + +a.headerlink { + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +a.headerlink:hover { + background-color: #990000; + color: white; +} + + +/* Container setup */ + +#docs-container { + max-width:1000px; +} + + +/* header/footer elements */ + +#docs-header h1 { + font-size:20px; + color: #222222; + margin: 0; + padding: 0; +} + +#docs-header { + font-family:Tahoma, Geneva,sans-serif; + + font-size:.9em; + +} + +#docs-top-navigation, +#docs-bottom-navigation { + font-family: Tahoma, Geneva, sans-serif; + background-color: #EEE; + border: solid 1px #CCC; + padding:10px; + font-size:.9em; +} + +#docs-top-navigation { + margin:10px 0px 10px 0px; + line-height:1.2em; +} + +.docs-navigation-links { + font-family:Tahoma, Geneva,sans-serif; +} + +#docs-bottom-navigation { + float:right; + margin: 1em 0 1em 5px; +} + +#docs-copyright { + font-size:.85em; + padding:5px 0px; +} + +#docs-header h1, +#docs-top-navigation h1, +#docs-top-navigation h2 { + font-family:Tahoma,Geneva,sans-serif; + font-weight:normal; +} + +#docs-top-navigation h2 { + margin:16px 4px 7px 5px; + font-size:2em; +} + +#docs-search { + float:right; +} + +#docs-top-page-control { + float:right; + width:350px; +} + +#docs-top-page-control ul { + padding:0; + margin:0; +} + +#docs-top-page-control li { + list-style-type:none; + padding:1px 8px; +} + + +#docs-container .version-num { + font-weight: bold; +} + + +/* content container, sidebar */ + +#docs-body-container { + background-color:#EFEFEF; + border: solid 1px #CCC; + +} + +#docs-body, +#docs-sidebar + { + /*font-family: helvetica, arial, sans-serif; + font-size:.9em;*/ + + font-family: Tahoma, Geneva, sans-serif; + /*font-size:.85em;*/ + line-height:1.5em; + +} + +#docs-sidebar > ul { + font-size:.9em; +} + +#docs-sidebar { + float:left; + width:212px; + padding: 10px 0 0 15px; + /*font-size:.85em;*/ +} + +#docs-sidebar h3, #docs-sidebar h4 { + background-color: #DDDDDD; + color: #222222; + font-family: Tahoma, Geneva,sans-serif; + font-size: 1.1em; + font-weight: normal; + margin: 10px 0 0 -15px; + padding: 5px 10px 5px 10px; + text-shadow: 1px 1px 0 white; + width:210px; +} + +#docs-sidebar h3 a, #docs-sidebar h4 a { + color: #222222; +} +#docs-sidebar ul { + margin: 10px 10px 10px 0px; + padding: 0; + list-style: none outside none; +} + + +#docs-sidebar ul ul { + margin-bottom: 0; + margin-top: 0; + 
list-style: square outside none; + margin-left: 20px; +} + +#docs-body { + background-color:#FFFFFF; + padding:1px 10px 10px 10px; +} + +#docs-body.withsidebar { + margin: 0 0 0 230px; + border-left:3px solid #DFDFDF; +} + +#docs-body h1, +#docs-body h2, +#docs-body h3, +#docs-body h4 { + font-family:Tahoma, Geneva, sans-serif; +} + +#docs-body h1 { + /* hide the

for each content section. */ + display:none; + font-size:1.8em; +} + +#docs-body h2 { + font-size:1.6em; +} + +#docs-body h3 { + font-size:1.4em; +} + +/* SQL popup, code styles */ + +.highlight { + background:none; +} + +#docs-container pre { + font-size:1.2em; +} + +#docs-container .pre { + font-size:1.1em; +} + +#docs-container pre { + background-color: #f0f0f0; + border: solid 1px #ccc; + box-shadow: 2px 2px 3px #DFDFDF; + padding:10px; + margin: 5px 0px 5px 0px; + overflow:auto; + line-height:1.3em; +} + +.popup_sql, .show_sql +{ + background-color: #FBFBEE; + padding:5px 10px; + margin:10px -5px; + border:1px dashed; +} + +/* the [SQL] links used to display SQL */ +#docs-container .sql_link +{ + font-weight:normal; + font-family: arial, sans-serif; + font-size:.9em; + text-transform: uppercase; + color:#990000; + border:1px solid; + padding:1px 2px 1px 2px; + margin:0px 10px 0px 15px; + float:right; + line-height:1.2em; +} + +#docs-container a.sql_link, +#docs-container .sql_link +{ + text-decoration: none; + padding:1px 2px; +} + +#docs-container a.sql_link:hover { + text-decoration: none; + color:#fff; + border:1px solid #900; + background-color: #900; +} + +/* docutils-specific elements */ + +th.field-name { + text-align:right; +} + +div.note, div.warning, p.deprecated, div.topic { + background-color:#EEFFEF; +} + + +div.admonition, div.topic, p.deprecated, p.versionadded, p.versionchanged { + border:1px solid #CCCCCC; + padding:5px 10px; + font-size:.9em; + box-shadow: 2px 2px 3px #DFDFDF; +} + +div.warning .admonition-title { + color:#FF0000; +} + +div.admonition .admonition-title, div.topic .topic-title { + font-weight:bold; +} + +.viewcode-back, .viewcode-link { + float:right; +} + +dl.function > dt, +dl.attribute > dt, +dl.classmethod > dt, +dl.method > dt, +dl.class > dt, +dl.exception > dt +{ + background-color:#F0F0F0; + margin:25px -10px 10px 10px; + padding: 0px 10px; +} + +p.versionadded span.versionmodified, +p.versionchanged span.versionmodified, +p.deprecated span.versionmodified { + background-color: #F0F0F0; + font-style: italic; +} + +dt:target, span.highlight { + background-color:#FBE54E; +} + +a.headerlink { + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +a.headerlink:hover { + background-color: #00f; + color: white; +} + +.clearboth { + clear:both; +} + +tt.descname { + background-color:transparent; + font-size:1.2em; + font-weight:bold; +} + +tt.descclassname { + background-color:transparent; +} + +tt { + background-color:#ECF0F3; + padding:0 1px; +} + +/* syntax highlighting overrides */ +.k, .kn {color:#0908CE;} +.o {color:#BF0005;} +.go {color:#804049;} + + +/* special "index page" sections + with specific formatting +*/ + +div#sqlalchemy-documentation { + font-size:.95em; +} +div#sqlalchemy-documentation em { + font-style:normal; +} +div#sqlalchemy-documentation .rubric{ + font-size:14px; + background-color:#EEFFEF; + padding:5px; + border:1px solid #BFBFBF; +} +div#sqlalchemy-documentation a, div#sqlalchemy-documentation li { + padding:5px 0px; +} + +div#getting-started { + border-bottom:1px solid; +} + +div#sqlalchemy-documentation div#sqlalchemy-orm { + float:left; + width:48%; +} + +div#sqlalchemy-documentation div#sqlalchemy-core { + float:left; + width:48%; + margin:0; + padding-left:10px; + 
border-left:1px solid; +} + +div#dialect-documentation { + border-top:1px solid; + /*clear:left;*/ +} diff --git a/lib3/Mako-0.7.3/doc/_static/doctools.js b/lib3/Mako-0.7.3/doc/_static/doctools.js new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_static/doctools.js @@ -0,0 +1,247 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for all documentation. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", + "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", + "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {}; +} + */ + +/** + * small helper function to urldecode strings + */ +jQuery.urldecode = function(x) { + return decodeURIComponent(x).replace(/\+/g, ' '); +} + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s == 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * small function to check if an array contains + * a given item. + */ +jQuery.contains = function(arr, item) { + for (var i = 0; i < arr.length; i++) { + if (arr[i] == item) + return true; + } + return false; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node) { + if (node.nodeType == 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { + var span = document.createElement("span"); + span.className = className; + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this); + }); + } + } + return this.each(function() { + highlight(this); + }); +}; + +/** + * Small JavaScript module for the documentation. + */ +var Documentation = { + + init : function() { + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initIndexTable(); + }, + + /** + * i18n support + */ + TRANSLATIONS : {}, + PLURAL_EXPR : function(n) { return n == 1 ? 
0 : 1; }, + LOCALE : 'unknown', + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext : function(string) { + var translated = Documentation.TRANSLATIONS[string]; + if (typeof translated == 'undefined') + return string; + return (typeof translated == 'string') ? translated : translated[0]; + }, + + ngettext : function(singular, plural, n) { + var translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated == 'undefined') + return (n == 1) ? singular : plural; + return translated[Documentation.PLURALEXPR(n)]; + }, + + addTranslations : function(catalog) { + for (var key in catalog.messages) + this.TRANSLATIONS[key] = catalog.messages[key]; + this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); + this.LOCALE = catalog.locale; + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + $('div[id] > :header:first').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this headline')). + appendTo(this); + }); + $('dt[id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this definition')). + appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlighted'); + }); + }, 10); + $('') + .appendTo($('#searchbox')); + } + }, + + /** + * init the domain index toggle buttons + */ + initIndexTable : function() { + var togglers = $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + $('tr.cg-' + idnum).toggle(); + if (src.substr(-9) == 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', ''); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { + togglers.click(); + } + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('#searchbox .highlight-link').fadeOut(300); + $('span.highlighted').removeClass('highlighted'); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this == '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + } +}; + +// quick alias for translations +_ = Documentation.gettext; + +$(document).ready(function() { + Documentation.init(); +}); diff --git a/lib3/Mako-0.7.3/doc/_static/down-pressed.png b/lib3/Mako-0.7.3/doc/_static/down-pressed.png new file mode 100644 index 0000000000000000000000000000000000000000..6f7ad782782e4f8e39b0c6e15c7344700cdd2527 GIT binary 
patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/down.png b/lib3/Mako-0.7.3/doc/_static/down.png new file mode 100644 index 0000000000000000000000000000000000000000..3003a88770de3977d47a2ba69893436a2860f9e7 GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/file.png b/lib3/Mako-0.7.3/doc/_static/file.png new file mode 100644 index 0000000000000000000000000000000000000000..d18082e397e7e54f20721af768c4c2983258f1b4 GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/jquery.js b/lib3/Mako-0.7.3/doc/_static/jquery.js new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_static/jquery.js @@ -0,0 +1,154 @@ +/*! + * jQuery JavaScript Library v1.4.2 + * http://jquery.com/ + * + * Copyright 2010, John Resig + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * Includes Sizzle.js + * http://sizzlejs.com/ + * Copyright 2010, The Dojo Foundation + * Released under the MIT, BSD, and GPL Licenses. + * + * Date: Sat Feb 13 22:33:48 2010 -0500 + */ +(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/, +Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&& +(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this, +a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b=== +"find")f.selector=this.selector+(this.selector?" 
":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this, +function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b
a"; +var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected, +parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent= +false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n= +s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true, +applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando]; +else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this, +a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var 
d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b=== +w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i, +cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected= +c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed"); +a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g, +function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split("."); +k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else 
a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a), +C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B=0){a.type= +e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&& +f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive; +if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data", +e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a, +"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a, +d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return 
e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, +e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift(); +t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D|| +g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()}, +CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m, +g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)}, +text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}}, +setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return 
h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return hl[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h= +h[3];l=0;for(m=h.length;l=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m=== +"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g, +h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&& +q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML=""; +if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="

";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}(); +(function(){var g=s.createElement("div");g.innerHTML="
";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}: +function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f0)for(var j=d;j0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j= +{},i;if(f&&a.length){e=0;for(var o=a.length;e-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a=== +"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode", +d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")? +a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType=== +1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/"},F={option:[1,""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div
","
"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d= +c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this}, +wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})}, +prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b, +this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild); +return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja, +""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]); +return this}else{e=0;for(var j=d.length;e0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var 
e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["", +""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]===""&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e= +c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]? +c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja= +function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter= +Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a, +"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f= +a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var 
j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b= +a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=//gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!== +"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("
").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this}, +serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "), +function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href, +global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&& +e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)? 
+"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache=== +false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B= +false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since", +c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E|| +d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x); +g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status=== +1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof 
a==="string")if(b=== +"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional; +if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration=== +"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]|| +c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start; +this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now= +this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in 
this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem, +e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b
"; +a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b); +c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a, +d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top- +f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset": +"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in +e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window); diff --git a/lib3/Mako-0.7.3/doc/_static/makoLogo.png b/lib3/Mako-0.7.3/doc/_static/makoLogo.png new file mode 100644 index 0000000000000000000000000000000000000000..c43c087eb48ebfc2223b76cf3df2fa7868c2a72b GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/minus.png b/lib3/Mako-0.7.3/doc/_static/minus.png new file mode 100644 index 0000000000000000000000000000000000000000..da1c5620d10c047525a467a425abe9ff5269cfc2 GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/plus.png b/lib3/Mako-0.7.3/doc/_static/plus.png new file mode 100644 index 0000000000000000000000000000000000000000..b3cb37425ea68b39ffa7b2e5fb69161275a87541 
GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/_static/pygments.css b/lib3/Mako-0.7.3/doc/_static/pygments.css new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_static/pygments.css @@ -0,0 +1,62 @@ +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #303030 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0040D0 } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* 
Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/lib3/Mako-0.7.3/doc/_static/searchtools.js b/lib3/Mako-0.7.3/doc/_static/searchtools.js new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/_static/searchtools.js @@ -0,0 +1,560 @@ +/* + * searchtools.js_t + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilties for the full-text search. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words, hlwords is the list of normal, unstemmed + * words. the first one is used to find the occurance, the + * latter for highlighting it. + */ + +jQuery.makeSearchSummary = function(text, keywords, hlwords) { + var textLower = text.toLowerCase(); + var start = 0; + $.each(keywords, function() { + var i = textLower.indexOf(this.toLowerCase()); + if (i > -1) + start = i; + }); + start = Math.max(start - 120, 0); + var excerpt = ((start > 0) ? '...' : '') + + $.trim(text.substr(start, 240)) + + ((start + 240 - text.length) ? '...' : ''); + var rv = $('
').text(excerpt); + $.each(hlwords, function() { + rv = rv.highlightText(this, 'highlighted'); + }); + return rv; +} + + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + 
re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + + +/** + * Search Module + */ +var Search = { + + _index : null, + _queued_query : null, + _pulse_status : -1, + + init : function() { + var params = $.getQueryParameters(); + if (params.q) { + var query = params.q[0]; + $('input[name="q"]')[0].value = query; + this.performSearch(query); + } + }, + + loadIndex : function(url) { + $.ajax({type: "GET", url: url, data: null, success: null, + dataType: "script", cache: true}); + }, + + setIndex : function(index) { + var q; + this._index = index; + if ((q = this._queued_query) !== null) { + this._queued_query = null; + Search.query(q); + } + }, + + hasIndex : function() { + return this._index !== null; + }, + + deferQuery : function(query) { + this._queued_query = query; + }, + + stopPulse : function() { + this._pulse_status = 0; + }, + + startPulse : function() { + if (this._pulse_status >= 0) + return; + function pulse() { + Search._pulse_status = (Search._pulse_status + 1) % 4; + var dotString = ''; + for (var i = 0; i < Search._pulse_status; i++) + dotString += '.'; + Search.dots.text(dotString); + if (Search._pulse_status > -1) + window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something + */ + performSearch : function(query) { + // create the required interface elements + this.out = $('#search-results'); + this.title = $('

' + _('Searching') + '

').appendTo(this.out); + this.dots = $('').appendTo(this.title); + this.status = $('

').appendTo(this.out); + this.output = $('

'); + } + // Prettify the comment rating. + comment.pretty_rating = comment.rating + ' point' + + (comment.rating == 1 ? '' : 's'); + // Make a class (for displaying not yet moderated comments differently) + comment.css_class = comment.displayed ? '' : ' moderate'; + // Create a div for this comment. + var context = $.extend({}, opts, comment); + var div = $(renderTemplate(commentTemplate, context)); + + // If the user has voted on this comment, highlight the correct arrow. + if (comment.vote) { + var direction = (comment.vote == 1) ? 'u' : 'd'; + div.find('#' + direction + 'v' + comment.id).hide(); + div.find('#' + direction + 'u' + comment.id).show(); + } + + if (opts.moderator || comment.text != '[deleted]') { + div.find('a.reply').show(); + if (comment.proposal_diff) + div.find('#sp' + comment.id).show(); + if (opts.moderator && !comment.displayed) + div.find('#cm' + comment.id).show(); + if (opts.moderator || (opts.username == comment.username)) + div.find('#dc' + comment.id).show(); + } + return div; + } + + /** + * A simple template renderer. Placeholders such as <%id%> are replaced + * by context['id'] with items being escaped. Placeholders such as <#id#> + * are not escaped. + */ + function renderTemplate(template, context) { + var esc = $(document.createElement('div')); + + function handle(ph, escape) { + var cur = context; + $.each(ph.split('.'), function() { + cur = cur[this]; + }); + return escape ? esc.text(cur || "").html() : cur; + } + + return template.replace(/<([%#])([\w\.]*)\1>/g, function() { + return handle(arguments[2], arguments[1] == '%' ? true : false); + }); + } + + /** Flash an error message briefly. */ + function showError(message) { + $(document.createElement('div')).attr({'class': 'popup-error'}) + .append($(document.createElement('div')) + .attr({'class': 'error-message'}).text(message)) + .appendTo('body') + .fadeIn("slow") + .delay(2000) + .fadeOut("slow"); + } + + /** Add a link the user uses to open the comments popup. */ + $.fn.comment = function() { + return this.each(function() { + var id = $(this).attr('id').substring(1); + var count = COMMENT_METADATA[id]; + var title = count + ' comment' + (count == 1 ? '' : 's'); + var image = count > 0 ? opts.commentBrightImage : opts.commentImage; + var addcls = count == 0 ? 
' nocomment' : ''; + $(this) + .append( + $(document.createElement('a')).attr({ + href: '#', + 'class': 'sphinx-comment-open' + addcls, + id: 'ao' + id + }) + .append($(document.createElement('img')).attr({ + src: image, + alt: 'comment', + title: title + })) + .click(function(event) { + event.preventDefault(); + show($(this).attr('id').substring(2)); + }) + ) + .append( + $(document.createElement('a')).attr({ + href: '#', + 'class': 'sphinx-comment-close hidden', + id: 'ah' + id + }) + .append($(document.createElement('img')).attr({ + src: opts.closeCommentImage, + alt: 'close', + title: 'close' + })) + .click(function(event) { + event.preventDefault(); + hide($(this).attr('id').substring(2)); + }) + ); + }); + }; + + var opts = { + processVoteURL: '/_process_vote', + addCommentURL: '/_add_comment', + getCommentsURL: '/_get_comments', + acceptCommentURL: '/_accept_comment', + deleteCommentURL: '/_delete_comment', + commentImage: '/static/_static/comment.png', + closeCommentImage: '/static/_static/comment-close.png', + loadingImage: '/static/_static/ajax-loader.gif', + commentBrightImage: '/static/_static/comment-bright.png', + upArrow: '/static/_static/up.png', + downArrow: '/static/_static/down.png', + upArrowPressed: '/static/_static/up-pressed.png', + downArrowPressed: '/static/_static/down-pressed.png', + voting: false, + moderator: false + }; + + if (typeof COMMENT_OPTIONS != "undefined") { + opts = jQuery.extend(opts, COMMENT_OPTIONS); + } + + var popupTemplate = '\ +
\ +

\ + Sort by:\ + best rated\ + newest\ + oldest\ +

\ +
Comments
\ +
\ + loading comments...
\ +
    \ +
    \ +

    Add a comment\ + (markup):

    \ +
    \ + reStructured text markup: *emph*, **strong**, \ + ``code``, \ + code blocks: :: and an indented block after blank line
    \ + \ + \ +

\ + \ + Propose a change ▹\ + \ + \ + Propose a change ▿\ + \ +

    \ + \ + \ + \ + \ + \ +
    \ +
    '; + + var commentTemplate = '\ +
    \ +
    \ +
    \ + \ + \ + \ + \ + \ + \ +
    \ +
    \ + \ + \ + \ + \ + \ + \ +
    \ +
    \ +
    \ +

    \ + <%username%>\ + <%pretty_rating%>\ + <%time.delta%>\ +

    \ +
    <#text#>
    \ +

\ + \ + reply ▿\ + proposal ▹\ + proposal ▿\ + \ + \ +

    \ +
    \
    +<#proposal_diff#>\
    +        
    \ +
      \ +
      \ +
      \ +
      \ +
      '; + + var replyTemplate = '\ +
    • \ +
      \ +
      \ + \ + \ + \ + \ + \ +
      \ +
      \ +
    • '; + + $(document).ready(function() { + init(); + }); +})(jQuery); + +$(document).ready(function() { + // add comment anchors for all paragraphs that are commentable + $('.sphinx-has-comment').comment(); + + // highlight search words in search results + $("div.context").each(function() { + var params = $.getQueryParameters(); + var terms = (params.q) ? params.q[0].split(/\s+/) : []; + var result = $(this); + $.each(terms, function() { + result.highlightText(this.toLowerCase(), 'highlighted'); + }); + }); + + // directly open comment window if requested + var anchor = document.location.hash; + if (anchor.substring(0, 9) == '#comment-') { + $('#ao' + anchor.substring(9)).click(); + document.location.hash = '#s' + anchor.substring(9); + } +}); diff --git a/lib3/Mako-0.7.3/doc/build/Makefile b/lib3/Mako-0.7.3/doc/build/Makefile new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/Makefile @@ -0,0 +1,137 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = output + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest dist-html site-mako + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dist-html same as html, but places files in /doc" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html -A mako_layout=html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dist-html: + $(SPHINXBUILD) -b html -A mako_layout=html $(ALLSPHINXOPTS) .. + @echo + @echo "Build finished. The HTML pages are in ../." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." 
+ +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/SQLAlchemy.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/SQLAlchemy.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/SQLAlchemy" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/SQLAlchemy" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + cp texinputs/* $(BUILDDIR)/latex/ + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + make -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) . + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." 
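As a concrete illustration of how the variables above expand, the ``html`` target with the default (empty) ``PAPER`` and ``SPHINXOPTS`` settings boils down to a single ``sphinx-build`` invocation. The short Python equivalent below is a hypothetical sketch, not part of the changeset; it assumes ``sphinx-build`` is on the ``PATH`` and that it is run from the documentation source directory:

.. sourcecode:: python

    import subprocess

    BUILDDIR = "output"

    subprocess.check_call([
        "sphinx-build",
        "-b", "html",                     # the HTML builder
        "-A", "mako_layout=html",         # template value consumed by the Mako layout
        "-d", BUILDDIR + "/doctrees",     # doctree cache, as in ALLSPHINXOPTS
        ".",                              # source directory
        BUILDDIR + "/html",               # output directory
    ])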
diff --git a/lib3/Mako-0.7.3/doc/build/builder/__init__.py b/lib3/Mako-0.7.3/doc/build/builder/__init__.py new file mode 100644 diff --git a/lib3/Mako-0.7.3/doc/build/builder/builders.py b/lib3/Mako-0.7.3/doc/build/builder/builders.py new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/builder/builders.py @@ -0,0 +1,97 @@ +from sphinx.application import TemplateBridge +from sphinx.builders.html import StandaloneHTMLBuilder +from sphinx.highlighting import PygmentsBridge +from sphinx.jinja2glue import BuiltinTemplateLoader +from pygments import highlight +from pygments.lexer import RegexLexer, bygroups, using +from pygments.token import * +from pygments.filter import Filter, apply_filters +from pygments.lexers import PythonLexer, PythonConsoleLexer +from pygments.formatters import HtmlFormatter, LatexFormatter +import re +import os +from mako.lookup import TemplateLookup +from mako.template import Template +from mako.ext.pygmentplugin import MakoLexer + +rtd = os.environ.get('READTHEDOCS', None) == 'True' + +class MakoBridge(TemplateBridge): + def init(self, builder, *args, **kw): + self.jinja2_fallback = BuiltinTemplateLoader() + self.jinja2_fallback.init(builder, *args, **kw) + + builder.config.html_context['site_base'] = builder.config['site_base'] + + self.lookup = TemplateLookup( + directories=builder.config.templates_path, + imports=[ + "from builder import util" + ] + ) + + def render(self, template, context): + template = template.replace(".html", ".mako") + context['prevtopic'] = context.pop('prev', None) + context['nexttopic'] = context.pop('next', None) + + # RTD layout + if rtd: + # add variables if not present, such + # as if local test of READTHEDOCS variable + if 'MEDIA_URL' not in context: + context['MEDIA_URL'] = "http://media.readthedocs.org/" + if 'slug' not in context: + context['slug'] = "mako-test-slug" + if 'url' not in context: + context['url'] = "/some/test/url" + if 'current_version' not in context: + context['current_version'] = "some_version" + if 'versions' not in context: + context['versions'] = [('default', '/default/')] + + context['docs_base'] = "http://readthedocs.org" + context['toolbar'] = True + context['layout'] = "rtd_layout.mako" + context['pdf_url'] = "%spdf/%s/%s/%s.pdf" % ( + context['MEDIA_URL'], + context['slug'], + context['current_version'], + context['slug'] + ) + # local docs layout + else: + context['toolbar'] = False + context['docs_base'] = "/" + context['layout'] = "layout.mako" + + context.setdefault('_', lambda x:x) + return self.lookup.get_template(template).render_unicode(**context) + + def render_string(self, template, context): + # this is used for .js, .css etc. and we don't have + # local copies of that stuff here so use the jinja render. 
+ return self.jinja2_fallback.render_string(template, context) + +class StripDocTestFilter(Filter): + def filter(self, lexer, stream): + for ttype, value in stream: + if ttype is Token.Comment and re.match(r'#\s*doctest:', value): + continue + yield ttype, value + + +def autodoc_skip_member(app, what, name, obj, skip, options): + if what == 'class' and skip and name == '__init__': + return False + else: + return skip + +def setup(app): +# app.connect('autodoc-skip-member', autodoc_skip_member) + # Mako is already in Pygments, adding the local + # lexer here so that the latest syntax is available + app.add_lexer('mako', MakoLexer()) + app.add_config_value('site_base', "", True) + + \ No newline at end of file diff --git a/lib3/Mako-0.7.3/doc/build/builder/util.py b/lib3/Mako-0.7.3/doc/build/builder/util.py new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/builder/util.py @@ -0,0 +1,12 @@ +import re + +def striptags(text): + return re.compile(r'<[^>]*>').sub('', text) + +def go(m): + # .html with no anchor if present, otherwise "#" for top of page + return m.group(1) or '#' + +def strip_toplevel_anchors(text): + return re.compile(r'(\.html)?#[-\w]+-toplevel').sub(go, text) + diff --git a/lib3/Mako-0.7.3/doc/build/caching.rst b/lib3/Mako-0.7.3/doc/build/caching.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/caching.rst @@ -0,0 +1,393 @@ +.. _caching_toplevel: + +======= +Caching +======= + +Any template or component can be cached using the ``cache`` +argument to the ``<%page>``, ``<%def>`` or ``<%block>`` directives: + +.. sourcecode:: mako + + <%page cached="True"/> + + template text + +The above template, after being executed the first time, will +store its content within a cache that by default is scoped +within memory. Subsequent calls to the template's :meth:`~.Template.render` +method will return content directly from the cache. When the +:class:`.Template` object itself falls out of scope, its corresponding +cache is garbage collected along with the template. + +By default, caching requires that the `Beaker `_ package be installed on the +system, however the mechanism of caching can be customized to use +any third party or user defined system -- see :ref:`cache_plugins`. + +In addition to being available on the ``<%page>`` tag, the caching flag and all +its options can be used with the ``<%def>`` tag as well: + +.. sourcecode:: mako + + <%def name="mycomp" cached="True" cache_timeout="60"> + other text + + +... and equivalently with the ``<%block>`` tag, anonymous or named: + +.. sourcecode:: mako + + <%block cached="True" cache_timeout="60"> + other text + + +Cache Arguments +=============== + +Mako has two cache arguments available on tags that are +available in all cases. The rest of the arguments +available are specific to a backend. + +The two generic tags arguments are: + +* ``cached="True"`` - enable caching for this ``<%page>``, + ``<%def>``, or ``<%block>``. +* ``cache_key`` - the "key" used to uniquely identify this content + in the cache. Usually, this key is chosen automatically + based on the name of the rendering callable (i.e. ``body`` + when used in ``<%page>``, the name of the def when using ``<%def>``, + the explicit or internally-generated name when using ``<%block>``). + Using the ``cache_key`` parameter, the key can be overridden + using a fixed or programmatically generated value. + + For example, here's a page + that caches any page which inherits from it, based on the + filename of the calling template: + + .. 
sourcecode:: mako + + <%page cached="True" cache_key="${self.filename}"/> + + ${next.body()} + + ## rest of template + +On a :class:`.Template` or :class:`.TemplateLookup`, the +caching can be configured using these arguments: + +* ``cache_enabled`` - Setting this + to ``False`` will disable all caching functionality + when the template renders. Defaults to ``True``. + e.g.: + + .. sourcecode:: python + + lookup = TemplateLookup( + directories='/path/to/templates', + cache_enabled = False + ) + +* ``cache_impl`` - The string name of the cache backend + to use. This defaults to ``'beaker'``, which has historically + been the only cache backend supported by Mako. + + .. versionadded:: 0.6.0 + + For example, here's how to use the upcoming + `dogpile.cache `_ + backend: + + .. sourcecode:: python + + lookup = TemplateLookup( + directories='/path/to/templates', + cache_impl = 'dogpile.cache', + cache_args = {'regions':my_dogpile_regions} + ) + +* ``cache_args`` - A dictionary of cache parameters that + will be consumed by the cache backend. See + :ref:`beaker_backend` for examples. + + .. versionadded:: 0.6.0 + +Backend-Specific Cache Arguments +-------------------------------- + +The ``<%page>``, ``<%def>``, and ``<%block>`` tags +accept any named argument that starts with the prefix ``"cache_"``. +Those arguments are then packaged up and passed along to the +underlying caching implementation, minus the ``"cache_"`` prefix. + +The actual arguments understood are determined by the backend. + +* :ref:`beaker_backend` - Includes arguments understood by + Beaker. +* :ref:`dogpile.cache_backend` - Includes arguments understood by + dogpile.cache. + +.. _beaker_backend: + +Using the Beaker Cache Backend +------------------------------ + +When using Beaker, new implementations will want to make usage +of **cache regions** so that cache configurations can be maintained +externally to templates. These configurations live under +named "regions" that can be referred to within templates themselves. + +.. versionadded:: 0.6.0 + Support for Beaker cache regions. + +For example, suppose we would like two regions. One is a "short term" +region that will store content in a memory-based dictionary, +expiring after 60 seconds. The other is a Memcached region, +where values should expire in five minutes. To configure +our :class:`.TemplateLookup`, first we get a handle to a +:class:`beaker.cache.CacheManager`: + +.. sourcecode:: python + + from beaker.cache import CacheManager + + manager = CacheManager(cache_regions={ + 'short_term':{ + 'type': 'memory', + 'expire': 60 + }, + 'long_term':{ + 'type': 'ext:memcached', + 'url': '127.0.0.1:11211', + 'expire': 300 + } + }) + + lookup = TemplateLookup( + directories=['/path/to/templates'], + module_directory='/path/to/modules', + cache_impl='beaker', + cache_args={ + 'manager':manager + } + ) + +Our templates can then opt to cache data in one of either region, +using the ``cache_region`` argument. Such as using ``short_term`` +at the ``<%page>`` level: + +.. sourcecode:: mako + + <%page cached="True" cache_region="short_term"> + + ## ... + +Or, ``long_term`` at the ``<%block>`` level: + +.. sourcecode:: mako + + <%block name="header" cached="True" cache_region="long_term"> + other text + + +The Beaker backend also works without regions. There are a +variety of arguments that can be passed to the ``cache_args`` +dictionary, which are also allowable in templates via the +``<%page>``, ``<%block>``, +and ``<%def>`` tags specific to those sections. 
The values +given override those specified at the :class:`.TemplateLookup` +or :class:`.Template` level. + +With the possible exception +of ``cache_timeout``, these arguments are probably better off +staying at the template configuration level. Each argument +specified as ``cache_XYZ`` in a template tag is specified +without the ``cache_`` prefix in the ``cache_args`` dictionary: + +* ``cache_timeout`` - number of seconds in which to invalidate the + cached data. After this timeout, the content is re-generated + on the next call. Available as ``timeout`` in the ``cache_args`` + dictionary. +* ``cache_type`` - type of caching. ``'memory'``, ``'file'``, ``'dbm'``, or + ``'ext:memcached'`` (note that the string ``memcached`` is + also accepted by the dogpile.cache Mako plugin, though not by Beaker itself). + Available as ``type`` in the ``cache_args`` dictionary. +* ``cache_url`` - (only used for ``memcached`` but required) a single + IP address or a semi-colon separated list of IP addresses of + memcache servers to use. Available as ``url`` in the ``cache_args`` + dictionary. +* ``cache_dir`` - in the case of the ``'file'`` and ``'dbm'`` cache types, + this is the filesystem directory with which to store data + files. If this option is not present, the value of + ``module_directory`` is used (i.e. the directory where compiled + template modules are stored). If neither option is available + an exception is thrown. Available as ``dir`` in the + ``cache_args`` dictionary. + +.. _dogpile.cache_backend: + +Using the dogpile.cache Backend +------------------------------- + +`dogpile.cache`_ is a new replacement for Beaker. It provides +a modernized, slimmed down interface and is generally easier to use +than Beaker. As of this writing it has not yet been released. dogpile.cache +includes its own Mako cache plugin -- see :mod:`dogpile.cache.plugins.mako_cache` in the +dogpile.cache documentation. + +Programmatic Cache Access +========================= + +The :class:`.Template`, as well as any template-derived :class:`.Namespace`, has +an accessor called ``cache`` which returns the :class:`.Cache` object +for that template. This object is a facade on top of the underlying +:class:`.CacheImpl` object, and provides some very rudimental +capabilities, such as the ability to get and put arbitrary +values: + +.. sourcecode:: mako + + <% + local.cache.set("somekey", "somevalue", type="memory") + %> + +Above, the cache associated with the ``local`` namespace is +accessed and a key is placed within a memory cache. + +More commonly, the ``cache`` object is used to invalidate cached +sections programmatically: + +.. sourcecode:: python + + template = lookup.get_template('/sometemplate.html') + + # invalidate the "body" of the template + template.cache.invalidate_body() + + # invalidate an individual def + template.cache.invalidate_def('somedef') + + # invalidate an arbitrary key + template.cache.invalidate('somekey') + +You can access any special method or attribute of the :class:`.CacheImpl` +itself using the :attr:`impl <.Cache.impl>` attribute: + +.. sourcecode:: python + + template.cache.impl.do_something_special() + +Note that using implementation-specific methods will mean you can't +swap in a different kind of :class:`.CacheImpl` implementation at a +later time. + +.. _cache_plugins: + +Cache Plugins +============= + +The mechanism used by caching can be plugged in +using a :class:`.CacheImpl` subclass. This class implements +the rudimental methods Mako needs to implement the caching +API.
Mako includes the :class:`.BeakerCacheImpl` class to +provide the default implementation. A :class:`.CacheImpl` class +is acquired by Mako using a ``pkg_resources`` entrypoint, using +the name given as the ``cache_impl`` argument to :class:`.Template` +or :class:`.TemplateLookup`. This entry point can be +installed via the standard `setuptools`/``setup()`` procedure, underneath +the `EntryPoint` group named ``"mako.cache"``. It can also be +installed at runtime via a convenience installer :func:`.register_plugin` +which accomplishes essentially the same task. + +An example plugin that implements a local dictionary cache: + +.. sourcecode:: python + + from mako.cache import CacheImpl, register_plugin + + class SimpleCacheImpl(CacheImpl): + def __init__(self, cache): + super(SimpleCacheImpl, self).__init__(cache) + self._cache = {} + + def get_or_create(self, key, creation_function, **kw): + if key in self._cache: + return self._cache[key] + else: + self._cache[key] = value = creation_function() + return value + + def set(self, key, value, **kwargs): + self._cache[key] = value + + def get(self, key, **kwargs): + return self._cache.get(key) + + def invalidate(self, key, **kwargs): + self._cache.pop(key, None) + + # optional - register the class locally + register_plugin("simple", __name__, "SimpleCacheImpl") + +Enabling the above plugin in a template would look like: + +.. sourcecode:: python + + t = Template("mytemplate", + file="mytemplate.html", + cache_impl='simple') + +Guidelines for Writing Cache Plugins +------------------------------------ + +* The :class:`.CacheImpl` is created on a per-:class:`.Template` basis. The + class should ensure that only data for the parent :class:`.Template` is + persisted or returned by the cache methods. The actual :class:`.Template` + is available via the ``self.cache.template`` attribute. The ``self.cache.id`` + attribute, which is essentially the unique modulename of the template, is + a good value to use in order to represent a unique namespace of keys specific + to the template. +* Templates only use the :meth:`.CacheImpl.get_or_create()` method + in an implicit fashion. The :meth:`.CacheImpl.set`, + :meth:`.CacheImpl.get`, and :meth:`.CacheImpl.invalidate` methods are + only used in response to direct programmatic access to the corresponding + methods on the :class:`.Cache` object. +* :class:`.CacheImpl` will be accessed in a multithreaded fashion if the + :class:`.Template` itself is used multithreaded. Care should be taken + to ensure caching implementations are threadsafe. +* A library like `Dogpile `_, which + is a minimal locking system derived from Beaker, can be used to help + implement the :meth:`.CacheImpl.get_or_create` method in a threadsafe + way that can maximize effectiveness across multiple threads as well + as processes. :meth:`.CacheImpl.get_or_create` is the + key method used by templates. +* All arguments passed to ``**kw`` come directly from the parameters + inside the ``<%def>``, ``<%block>``, or ``<%page>`` tags directly, + minus the ``"cache_"`` prefix, as strings, with the exception of + the argument ``cache_timeout``, which is passed to the plugin + as the name ``timeout`` with the value converted to an integer. + Arguments present in ``cache_args`` on :class:`.Template` or + :class:`.TemplateLookup` are passed directly, but are superseded + by those present in the most specific template tag.
+* The directory where :class:`.Template` places module files can + be acquired using the accessor ``self.cache.template.module_directory``. + This directory can be a good place to throw cache-related work + files, underneath a prefix like ``_my_cache_work`` so that name + conflicts with generated modules don't occur. + +API Reference +============= + +.. autoclass:: mako.cache.Cache + :members: + :show-inheritance: + +.. autoclass:: mako.cache.CacheImpl + :members: + :show-inheritance: + +.. autofunction:: mako.cache.register_plugin + +.. autoclass:: mako.ext.beaker_cache.BeakerCacheImpl + :members: + :show-inheritance: + diff --git a/lib3/Mako-0.7.3/doc/build/conf.py b/lib3/Mako-0.7.3/doc/build/conf.py new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/conf.py @@ -0,0 +1,287 @@ +# -*- coding: utf-8 -*- +# +# Mako documentation build configuration file +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys, os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('../..')) +sys.path.insert(0, os.path.abspath('.')) + +import mako + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +#extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', +# 'sphinx.ext.doctest', 'builder.builders'] + +extensions = ['sphinx.ext.autodoc','sphinx.ext.intersphinx', + 'sphinx.ext.doctest', 'builder.builders'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['templates'] + +nitpicky = True + +site_base = "http://www.makotemplates.org" + +# The suffix of source filenames. +source_suffix = '.rst' + +template_bridge = "builder.builders.MakoBridge" + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'Mako' +copyright = 'the Mako authors and contributors' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = mako.__version__ +# The full version, including alpha/beta/rc tags. +release = mako.__version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['build'] + +# The reST default role (used for this markup: `text`) to use for all documents. 
+#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The style sheet to use for HTML and HTML Help pages. A file of that name +# must exist either in Sphinx' static/ path, or in one of the custom paths +# given in html_static_path. +html_style = 'default.css' + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +html_title = "%s %s Documentation" % (project, release) + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +html_last_updated_fmt = '%m/%d/%Y %H:%M:%S' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +html_domain_indices = False + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, the reST sources are included in the HTML build as _sources/. +#html_copy_source = True + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. 
+#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Makodoc' + +#autoclass_content = 'both' + +# -- Options for LaTeX output -------------------------------------------------- + +# The paper size ('letter' or 'a4'). +#latex_paper_size = 'letter' + +# The font size ('10pt', '11pt' or '12pt'). +#latex_font_size = '10pt' + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ('index', 'mako_%s.tex' % release.replace('.', '_'), r'Mako Documentation', + r'Mike Bayer', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Additional stuff for the LaTeX preamble. +# sets TOC depth to 2. +latex_preamble = '\setcounter{tocdepth}{3}' + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +#latex_elements = { +# 'papersize': 'letterpaper', +# 'pointsize': '10pt', +#} + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'mako', 'Mako Documentation', + ['Mako authors'], 1) +] + + +# -- Options for Epub output --------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = 'Mako' +epub_author = 'Mako authors' +epub_publisher = 'Mako authors' +epub_copyright = 'Mako authors' + +# The language of the text. It defaults to the language option +# or en if the language is not set. +#epub_language = '' + +# The scheme of the identifier. Typical schemes are ISBN or URL. +#epub_scheme = '' + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +#epub_identifier = '' + +# A unique identification for the text. +#epub_uid = '' + +# HTML files that should be inserted before the pages created by sphinx. +# The format is a list of tuples containing the path and title. +#epub_pre_files = [] + +# HTML files shat should be inserted after the pages created by sphinx. +# The format is a list of tuples containing the path and title. +#epub_post_files = [] + +# A list of files that should not be packed into the epub file. +#epub_exclude_files = [] + +# The depth of the table of contents in toc.ncx. +#epub_tocdepth = 3 + +# Allow duplicate toc entries. +#epub_tocdup = True + +intersphinx_mapping = { + 'dogpilecache':('http://dogpilecache.readthedocs.org/en/latest', None), + 'beaker':('http://beaker.readthedocs.org/en/latest',None), +} diff --git a/lib3/Mako-0.7.3/doc/build/defs.rst b/lib3/Mako-0.7.3/doc/build/defs.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/defs.rst @@ -0,0 +1,622 @@ +.. _defs_toplevel: + +=============== +Defs and Blocks +=============== + +``<%def>`` and ``<%block>`` are two tags that both demarcate any block of text +and/or code. 
They both exist within generated Python as a callable function, +i.e., a Python ``def``. They differ in their scope and calling semantics. +Whereas ``<%def>`` provides a construct that is very much like a named Python +``def``, the ``<%block>`` is more layout oriented. + +Using Defs +========== + +The ``<%def>`` tag requires a ``name`` attribute, where the ``name`` references +a Python function signature: + +.. sourcecode:: mako + + <%def name="hello()"> + hello world + + +To invoke the ``<%def>``, it is normally called as an expression: + +.. sourcecode:: mako + + the def: ${hello()} + +If the ``<%def>`` is not nested inside of another ``<%def>``, +it's known as a **top level def** and can be accessed anywhere in +the template, including above where it was defined. + +All defs, top level or not, have access to the current +contextual namespace in exactly the same way their containing +template does. Suppose the template below is executed with the +variables ``username`` and ``accountdata`` inside the context: + +.. sourcecode:: mako + + Hello there ${username}, how are ya. Lets see what your account says: + + ${account()} + + <%def name="account()"> + Account for ${username}:
      + + % for row in accountdata: + Value: ${row}
      + % endfor + + +The ``username`` and ``accountdata`` variables are present +within the main template body as well as the body of the +``account()`` def. + +Since defs are just Python functions, you can define and pass +arguments to them as well: + +.. sourcecode:: mako + + ${account(accountname='john')} + + <%def name="account(accountname, type='regular')"> + account name: ${accountname}, type: ${type} + + +When you declare an argument signature for your def, they are +required to follow normal Python conventions (i.e., all +arguments are required except keyword arguments with a default +value). This is in contrast to using context-level variables, +which evaluate to ``UNDEFINED`` if you reference a name that +does not exist. + +Calling Defs from Other Files +----------------------------- + +Top level ``<%def>``\ s are **exported** by your template's +module, and can be called from the outside; including from other +templates, as well as normal Python code. Calling a ``<%def>`` +from another template is something like using an ``<%include>`` +-- except you are calling a specific function within the +template, not the whole template. + +The remote ``<%def>`` call is also a little bit like calling +functions from other modules in Python. There is an "import" +step to pull the names from another template into your own +template; then the function or functions are available. + +To import another template, use the ``<%namespace>`` tag: + +.. sourcecode:: mako + + <%namespace name="mystuff" file="mystuff.html"/> + +The above tag adds a local variable ``mystuff`` to the current +scope. + +Then, just call the defs off of ``mystuff``: + +.. sourcecode:: mako + + ${mystuff.somedef(x=5,y=7)} + +The ``<%namespace>`` tag also supports some of the other +semantics of Python's ``import`` statement, including pulling +names into the local variable space, or using ``*`` to represent +all names, using the ``import`` attribute: + +.. sourcecode:: mako + + <%namespace file="mystuff.html" import="foo, bar"/> + +This is just a quick intro to the concept of a **namespace**, +which is a central Mako concept that has its own chapter in +these docs. For more detail and examples, see +:ref:`namespaces_toplevel`. + +Calling Defs Programmatically +----------------------------- + +You can call defs programmatically from any :class:`.Template` object +using the :meth:`~.Template.get_def()` method, which returns a :class:`.DefTemplate` +object. This is a :class:`.Template` subclass which the parent +:class:`.Template` creates, and is usable like any other template: + +.. sourcecode:: python + + from mako.template import Template + + template = Template(""" + <%def name="hi(name)"> + hi ${name}! + + + <%def name="bye(name)"> + bye ${name}! + + """) + + print template.get_def("hi").render(name="ed") + print template.get_def("bye").render(name="ed") + +Defs within Defs +---------------- + +The def model follows regular Python rules for closures. +Declaring ``<%def>`` inside another ``<%def>`` declares it +within the parent's **enclosing scope**: + +.. sourcecode:: mako + + <%def name="mydef()"> + <%def name="subdef()"> + a sub def + + + i'm the def, and the subcomponent is ${subdef()} + + +Just like Python, names that exist outside the inner ``<%def>`` +exist inside it as well: + +.. 
sourcecode:: mako + + <% + x = 12 + %> + <%def name="outer()"> + <% + y = 15 + %> + <%def name="inner()"> + inner, x is ${x}, y is ${y} + + + outer, x is ${x}, y is ${y} + + +Assigning to a name inside of a def declares that name as local +to the scope of that def (again, like Python itself). This means +the following code will raise an error: + +.. sourcecode:: mako + + <% + x = 10 + %> + <%def name="somedef()"> + ## error ! + somedef, x is ${x} + <% + x = 27 + %> + + +...because the assignment to ``x`` declares ``x`` as local to the +scope of ``somedef``, rendering the "outer" version unreachable +in the expression that tries to render it. + +.. _defs_with_content: + +Calling a Def with Embedded Content and/or Other Defs +----------------------------------------------------- + +A flip-side to def within def is a def call with content. This +is where you call a def, and at the same time declare a block of +content (or multiple blocks) that can be used by the def being +called. The main point of such a call is to create custom, +nestable tags, just like any other template language's +custom-tag creation system -- where the external tag controls the +execution of the nested tags and can communicate state to them. +Only with Mako, you don't have to use any external Python +modules, you can define arbitrarily nestable tags right in your +templates. + +To achieve this, the target def is invoked using the form +``<%namepacename:defname>`` instead of the normal ``${}`` +syntax. This syntax, introduced in Mako 0.2.3, is functionally +equivalent to another tag known as ``%call``, which takes the form +``<%call expr='namespacename.defname(args)'>``. While ``%call`` +is available in all versions of Mako, the newer style is +probably more familiar looking. The ``namespace`` portion of the +call is the name of the **namespace** in which the def is +defined -- in the most simple cases, this can be ``local`` or +``self`` to reference the current template's namespace (the +difference between ``local`` and ``self`` is one of inheritance +-- see :ref:`namespaces_builtin` for details). + +When the target def is invoked, a variable ``caller`` is placed +in its context which contains another namespace containing the +body and other defs defined by the caller. The body itself is +referenced by the method ``body()``. Below, we build a ``%def`` +that operates upon ``caller.body()`` to invoke the body of the +custom tag: + +.. sourcecode:: mako + + <%def name="buildtable()"> + + +
      + ${caller.body()} +
      + + + <%self:buildtable> + I am the table body. + + +This produces the output (whitespace formatted): + +.. sourcecode:: html + + + +
      + I am the table body. +
      + +Using the older ``%call`` syntax looks like: + +.. sourcecode:: mako + + <%def name="buildtable()"> + + +
      + ${caller.body()} +
      + + + <%call expr="buildtable()"> + I am the table body. + + +The ``body()`` can be executed multiple times or not at all. +This means you can use def-call-with-content to build iterators, +conditionals, etc: + +.. sourcecode:: mako + + <%def name="lister(count)"> + % for x in range(count): + ${caller.body()} + % endfor + + + <%self:lister count="${3}"> + hi + + +Produces: + +.. sourcecode:: html + + hi + hi + hi + +Notice above we pass ``3`` as a Python expression, so that it +remains as an integer. + +A custom "conditional" tag: + +.. sourcecode:: mako + + <%def name="conditional(expression)"> + % if expression: + ${caller.body()} + % endif + + + <%self:conditional expression="${4==4}"> + i'm the result + + +Produces: + +.. sourcecode:: html + + i'm the result + +But that's not all. The ``body()`` function also can handle +arguments, which will augment the local namespace of the body +callable. The caller must define the arguments which it expects +to receive from its target def using the ``args`` attribute, +which is a comma-separated list of argument names. Below, our +``<%def>`` calls the ``body()`` of its caller, passing in an +element of data from its argument: + +.. sourcecode:: mako + + <%def name="layoutdata(somedata)"> + + % for item in somedata: + + % for col in item: + + % endfor + + % endfor +
      ${caller.body(col=col)}
      + + + <%self:layoutdata somedata="${[[1,2,3],[4,5,6],[7,8,9]]}" args="col">\ + Body data: ${col}\ + + +Produces: + +.. sourcecode:: html + + + + + + + + + + + + + + + + + +
      Body data: 1Body data: 2Body data: 3
      Body data: 4Body data: 5Body data: 6
      Body data: 7Body data: 8Body data: 9
      + +You don't have to stick to calling just the ``body()`` function. +The caller can define any number of callables, allowing the +``<%call>`` tag to produce whole layouts: + +.. sourcecode:: mako + + <%def name="layout()"> + ## a layout def +
      +
      + ${caller.header()} +
      + + + +
      + ${caller.body()} +
      +
      + + + ## calls the layout def + <%self:layout> + <%def name="header()"> + I am the header + + <%def name="sidebar()"> +
        +
      • sidebar 1
      • +
      • sidebar 2
      • +
      + + + this is the body + + +The above layout would produce: + +.. sourcecode:: html + +
      +
      + I am the header +
      + + + +
      + this is the body +
      +
      + +The number of things you can do with ``<%call>`` and/or the +``<%namespacename:defname>`` calling syntax is enormous. You can +create form widget libraries, such as an enclosing ``
      `` +tag and nested HTML input elements, or portable wrapping schemes +using ``
      `` or other elements. You can create tags that +interpret rows of data, such as from a database, providing the +individual columns of each row to a ``body()`` callable which +lays out the row any way it wants. Basically anything you'd do +with a "custom tag" or tag library in some other system, Mako +provides via ``<%def>`` tags and plain Python callables which are +invoked via ``<%namespacename:defname>`` or ``<%call>``. + +.. _blocks: + +Using Blocks +============ + +The ``<%block>`` tag introduces some new twists on the +``<%def>`` tag which make it more closely tailored towards layout. + +.. versionadded:: 0.4.1 + +An example of a block: + +.. sourcecode:: mako + + + + <%block> + this is a block. + + + + +In the above example, we define a simple block. The block renders its content in the place +that it's defined. Since the block is called for us, it doesn't need a name and the above +is referred to as an **anonymous block**. So the output of the above template will be: + +.. sourcecode:: html + + + + this is a block. + + + +So in fact the above block has absolutely no effect. Its usefulness comes when we start +using modifiers. Such as, we can apply a filter to our block: + +.. sourcecode:: mako + + + + <%block filter="h"> + this is some escaped html. + + + + +or perhaps a caching directive: + +.. sourcecode:: mako + + + + <%block cached="True" cache_timeout="60"> + This content will be cached for 60 seconds. + + + + +Blocks also work in iterations, conditionals, just like defs: + +.. sourcecode:: mako + + % if some_condition: + <%block>condition is met + % endif + +While the block renders at the point it is defined in the template, +the underlying function is present in the generated Python code only +once, so there's no issue with placing a block inside of a loop or +similar. Anonymous blocks are defined as closures in the local +rendering body, so have access to local variable scope: + +.. sourcecode:: mako + + % for i in range(1, 4): + <%block>i is ${i} + % endfor + +Using Named Blocks +------------------ + +Possibly the more important area where blocks are useful is when we +do actually give them names. Named blocks are tailored to behave +somewhat closely to Jinja2's block tag, in that they define an area +of a layout which can be overridden by an inheriting template. In +sharp contrast to the ``<%def>`` tag, the name given to a block is +global for the entire template regardless of how deeply it's nested: + +.. sourcecode:: mako + + + <%block name="header"> + + + <%block name="title">Title</%block> + + + + + ${next.body()} + + + +The above example has two named blocks "``header``" and "``title``", both of which can be referred to +by an inheriting template. A detailed walkthrough of this usage can be found at :ref:`inheritance_toplevel`. + +Note above that named blocks don't have any argument declaration the way defs do. They still implement themselves +as Python functions, however, so they can be invoked additional times beyond their initial definition: + +.. sourcecode:: mako + +
      + <%block name="pagecontrol"> + previous page | + next page + + + + ## some content +
      + + ${pagecontrol()} +
      + +The content referenced by ``pagecontrol`` above will be rendered both above and below the ```` tags. + +To keep things sane, named blocks have restrictions that defs do not: + +* The ``<%block>`` declaration cannot have any argument signature. +* The name of a ``<%block>`` can only be defined once in a template -- an error is raised if two blocks of the same + name occur anywhere in a single template, regardless of nesting. A similar error is raised if a top level def + shares the same name as that of a block. +* A named ``<%block>`` cannot be defined within a ``<%def>``, or inside the body of a "call", i.e. + ``<%call>`` or ``<%namespacename:defname>`` tag. Anonymous blocks can, however. + +Using Page Arguments in Named Blocks +------------------------------------ + +A named block is very much like a top level def. It has a similar +restriction to these types of defs in that arguments passed to the +template via the ``<%page>`` tag aren't automatically available. +Using arguments with the ``<%page>`` tag is described in the section +:ref:`namespaces_body`, and refers to scenarios such as when the +``body()`` method of a template is called from an inherited template passing +arguments, or the template is invoked from an ``<%include>`` tag +with arguments. To allow a named block to share the same arguments +passed to the page, the ``args`` attribute can be used: + +.. sourcecode:: mako + + <%page args="post"/> + + + + + <%block name="post_prose" args="post"> + ${post.content} + + + +Where above, if the template is called via a directive like +``<%include file="post.mako" args="post=post" />``, the ``post`` +variable is available both in the main body as well as the +``post_prose`` block. + +Similarly, the ``**pageargs`` variable is present, in named blocks only, +for those arguments not explicit in the ``<%page>`` tag: + +.. sourcecode:: mako + + <%block name="post_prose"> + ${pageargs['post'].content} + + +The ``args`` attribute is only allowed with named blocks. With +anonymous blocks, the Python function is always rendered in the same +scope as the call itself, so anything available directly outside the +anonymous block is available inside as well. diff --git a/lib3/Mako-0.7.3/doc/build/filtering.rst b/lib3/Mako-0.7.3/doc/build/filtering.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/filtering.rst @@ -0,0 +1,344 @@ +.. _filtering_toplevel: + +======================= +Filtering and Buffering +======================= + +Expression Filtering +==================== + +As described in the chapter :ref:`syntax_toplevel`, the "``|``" operator can be +applied to a "``${}``" expression to apply escape filters to the +output: + +.. sourcecode:: mako + + ${"this is some text" | u} + +The above expression applies URL escaping to the expression, and +produces ``this+is+some+text``. + +The built-in escape flags are: + +* ``u`` : URL escaping, provided by + ``urllib.quote_plus(string.encode('utf-8'))`` +* ``h`` : HTML escaping, provided by + ``markupsafe.escape(string)`` + + .. versionadded:: 0.3.4 + Prior versions use ``cgi.escape(string, True)``. 
+ +* ``x`` : XML escaping +* ``trim`` : whitespace trimming, provided by ``string.strip()`` +* ``entity`` : produces HTML entity references for applicable + strings, derived from ``htmlentitydefs`` +* ``unicode`` (``str`` on Python 3): produces a Python unicode + string (this function is applied by default) +* ``decode.``: decode input into a Python + unicode with the specified encoding +* ``n`` : disable all default filtering; only filters specified + in the local expression tag will be applied. + +To apply more than one filter, separate them by a comma: + +.. sourcecode:: mako + + ${" some value " | h,trim} + +The above produces ``<tag>some value</tag>``, with +no leading or trailing whitespace. The HTML escaping function is +applied first, the "trim" function second. + +Naturally, you can make your own filters too. A filter is just a +Python function that accepts a single string argument, and +returns the filtered result. The expressions after the ``|`` +operator draw upon the local namespace of the template in which +they appear, meaning you can define escaping functions locally: + +.. sourcecode:: mako + + <%! + def myescape(text): + return "" + text + "" + %> + + Here's some tagged text: ${"text" | myescape} + +Or from any Python module: + +.. sourcecode:: mako + + <%! + import myfilters + %> + + Here's some tagged text: ${"text" | myfilters.tagfilter} + +A page can apply a default set of filters to all expression tags +using the ``expression_filter`` argument to the ``%page`` tag: + +.. sourcecode:: mako + + <%page expression_filter="h"/> + + Escaped text: ${"some html"} + +Result: + +.. sourcecode:: html + + Escaped text: <html>some html</html> + +.. _filtering_default_filters: + +The ``default_filters`` Argument +-------------------------------- + +In addition to the ``expression_filter`` argument, the +``default_filters`` argument to both :class:`.Template` and +:class:`.TemplateLookup` can specify filtering for all expression tags +at the programmatic level. This array-based argument, when given +its default argument of ``None``, will be internally set to +``["unicode"]`` (or ``["str"]`` on Python 3), except when +``disable_unicode=True`` is set in which case it defaults to +``["str"]``: + +.. sourcecode:: python + + t = TemplateLookup(directories=['/tmp'], default_filters=['unicode']) + +To replace the usual ``unicode``/``str`` function with a +specific encoding, the ``decode`` filter can be substituted: + +.. sourcecode:: python + + t = TemplateLookup(directories=['/tmp'], default_filters=['decode.utf8']) + +To disable ``default_filters`` entirely, set it to an empty +list: + +.. sourcecode:: python + + t = TemplateLookup(directories=['/tmp'], default_filters=[]) + +Any string name can be added to ``default_filters`` where it +will be added to all expressions as a filter. The filters are +applied from left to right, meaning the leftmost filter is +applied first. + +.. sourcecode:: python + + t = Template(templatetext, default_filters=['unicode', 'myfilter']) + +To ease the usage of ``default_filters`` with custom filters, +you can also add imports (or other code) to all templates using +the ``imports`` argument: + +.. sourcecode:: python + + t = TemplateLookup(directories=['/tmp'], + default_filters=['unicode', 'myfilter'], + imports=['from mypackage import myfilter']) + +The above will generate templates something like this: + +.. sourcecode:: python + + # .... 
+ from mypackage import myfilter + + def render_body(context): + context.write(myfilter(unicode("some text"))) + +Turning off Filtering with the ``n`` Filter +------------------------------------------- + +In all cases the special ``n`` filter, used locally within an +expression, will **disable** all filters declared in the +``<%page>`` tag as well as in ``default_filters``. Such as: + +.. sourcecode:: mako + + ${'myexpression' | n} + +will render ``myexpression`` with no filtering of any kind, and: + +.. sourcecode:: mako + + ${'myexpression' | n,trim} + +will render ``myexpression`` using the ``trim`` filter only. + +Filtering Defs and Blocks +========================= + +The ``%def`` and ``%block`` tags have an argument called ``filter`` which will apply the +given list of filter functions to the output of the ``%def``: + +.. sourcecode:: mako + + <%def name="foo()" filter="h, trim"> + this is bold + + +When the ``filter`` attribute is applied to a def as above, the def +is automatically **buffered** as well. This is described next. + +Buffering +========= + +One of Mako's central design goals is speed. To this end, all of +the textual content within a template and its various callables +is by default piped directly to the single buffer that is stored +within the :class:`.Context` object. While this normally is easy to +miss, it has certain side effects. The main one is that when you +call a def using the normal expression syntax, i.e. +``${somedef()}``, it may appear that the return value of the +function is the content it produced, which is then delivered to +your template just like any other expression substitution, +except that normally, this is not the case; the return value of +``${somedef()}`` is simply the empty string ``''``. By the time +you receive this empty string, the output of ``somedef()`` has +been sent to the underlying buffer. + +You may not want this effect, if for example you are doing +something like this: + +.. sourcecode:: mako + + ${" results " + somedef() + " more results "} + +If the ``somedef()`` function produced the content "``somedef's +results``", the above template would produce this output: + +.. sourcecode:: html + + somedef's results results more results + +This is because ``somedef()`` fully executes before the +expression returns the results of its concatenation; the +concatenation in turn receives just the empty string as its +middle expression. + +Mako provides two ways to work around this. One is by applying +buffering to the ``%def`` itself: + +.. sourcecode:: mako + + <%def name="somedef()" buffered="True"> + somedef's results + + +The above definition will generate code similar to this: + +.. sourcecode:: python + + def somedef(): + context.push_buffer() + try: + context.write("somedef's results") + finally: + buf = context.pop_buffer() + return buf.getvalue() + +So that the content of ``somedef()`` is sent to a second buffer, +which is then popped off the stack and its value returned. The +speed hit inherent in buffering the output of a def is also +apparent. + +Note that the ``filter`` argument on ``%def`` also causes the def to +be buffered. This is so that the final content of the ``%def`` can +be delivered to the escaping function in one batch, which +reduces method calls and also produces more deterministic +behavior for the filtering function itself, which can possibly +be useful for a filtering function that wishes to apply a +transformation to the text as a whole. 
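To make the relationship between ``filter`` and buffering more concrete, the following self-contained toy mimics the buffered def shown above and then applies the filter chain to the collected output in one batch. It is a sketch, not code generated by Mako and not part of the changeset: ``h`` and ``trim`` are simplified hand-written stand-ins for the built-in filters, ``FakeContext`` is an invented helper, and the def body is an assumed HTML fragment.

.. sourcecode:: python

    from io import StringIO

    def h(text):
        # stand-in for the HTML-escape filter
        return (text.replace("&", "&amp;")
                    .replace("<", "&lt;")
                    .replace(">", "&gt;"))

    def trim(text):
        # stand-in for the whitespace-trim filter
        return text.strip()

    class FakeContext(object):
        # minimal imitation of the buffer stack described above
        def __init__(self):
            self._buffers = [StringIO()]
        def write(self, text):
            self._buffers[-1].write(text)
        def push_buffer(self):
            self._buffers.append(StringIO())
        def pop_buffer(self):
            return self._buffers.pop()

    def foo(context):
        # roughly the shape of a buffered <%def name="foo()" filter="h, trim">:
        # collect everything the def writes, then filter the whole result once,
        # leftmost filter first (h, then trim)
        context.push_buffer()
        try:
            context.write("   <b>bold</b> text   ")
        finally:
            buf = context.pop_buffer()
        return trim(h(buf.getvalue()))

    print(foo(FakeContext()))   # -> &lt;b&gt;bold&lt;/b&gt; text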
+ +The other way to buffer the output of a def or any Mako callable +is by using the built-in ``capture`` function. This function +performs an operation similar to the above buffering operation +except it is specified by the caller. + +.. sourcecode:: mako + + ${" results " + capture(somedef) + " more results "} + +Note that the first argument to the ``capture`` function is +**the function itself**, not the result of calling it. This is +because the ``capture`` function takes over the job of actually +calling the target function, after setting up a buffered +environment. To send arguments to the function, just send them +to ``capture`` instead: + +.. sourcecode:: mako + + ${capture(somedef, 17, 'hi', use_paging=True)} + +The above call is equivalent to the unbuffered call: + +.. sourcecode:: mako + + ${somedef(17, 'hi', use_paging=True)} + +Decorating +========== + +.. versionadded:: 0.2.5 + +Somewhat like a filter for a ``%def`` but more flexible, the ``decorator`` +argument to ``%def`` allows the creation of a function that will +work in a similar manner to a Python decorator. The function can +control whether or not the function executes. The original +intent of this function is to allow the creation of custom cache +logic, but there may be other uses as well. + +``decorator`` is intended to be used with a regular Python +function, such as one defined in a library module. Here we'll +illustrate the python function defined in the template for +simplicities' sake: + +.. sourcecode:: mako + + <%! + def bar(fn): + def decorate(context, *args, **kw): + context.write("BAR") + fn(*args, **kw) + context.write("BAR") + return '' + return decorate + %> + + <%def name="foo()" decorator="bar"> + this is foo + + + ${foo()} + +The above template will return, with more whitespace than this, +``"BAR this is foo BAR"``. The function is the render callable +itself (or possibly a wrapper around it), and by default will +write to the context. To capture its output, use the :func:`.capture` +callable in the ``mako.runtime`` module (available in templates +as just ``runtime``): + +.. sourcecode:: mako + + <%! + def bar(fn): + def decorate(context, *args, **kw): + return "BAR" + runtime.capture(context, fn, *args, **kw) + "BAR" + return decorate + %> + + <%def name="foo()" decorator="bar"> + this is foo + + + ${foo()} + +The decorator can be used with top-level defs as well as nested +defs, and blocks too. Note that when calling a top-level def from the +:class:`.Template` API, i.e. ``template.get_def('somedef').render()``, +the decorator has to write the output to the ``context``, i.e. +as in the first example. The return value gets discarded. diff --git a/lib3/Mako-0.7.3/doc/build/index.rst b/lib3/Mako-0.7.3/doc/build/index.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/index.rst @@ -0,0 +1,22 @@ +Table of Contents +================= + +.. toctree:: + :maxdepth: 2 + + usage + syntax + defs + runtime + namespaces + inheritance + filtering + unicode + caching + +Indices and Tables +------------------ + +* :ref:`genindex` +* :ref:`search` + diff --git a/lib3/Mako-0.7.3/doc/build/inheritance.rst b/lib3/Mako-0.7.3/doc/build/inheritance.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/inheritance.rst @@ -0,0 +1,534 @@ +.. _inheritance_toplevel: + +=========== +Inheritance +=========== + +.. note:: Most of the inheritance examples here take advantage of a feature that's + new in Mako as of version 0.4.1 called the "block". 
This tag is very similar to + the "def" tag but is more streamlined for usage with inheritance. Note that + all of the examples here which use blocks can also use defs instead. Contrasting + usages will be illustrated. + +Using template inheritance, two or more templates can organize +themselves into an **inheritance chain**, where content and +functions from all involved templates can be intermixed. The +general paradigm of template inheritance is this: if a template +``A`` inherits from template ``B``, then template ``A`` agrees +to send the executional control to template ``B`` at runtime +(``A`` is called the **inheriting** template). Template ``B``, +the **inherited** template, then makes decisions as to what +resources from ``A`` shall be executed. + +In practice, it looks like this. Here's a hypothetical inheriting +template, ``index.html``: + +.. sourcecode:: mako + + ## index.html + <%inherit file="base.html"/> + + <%block name="header"> + this is some header content + + + this is the body content. + +And ``base.html``, the inherited template: + +.. sourcecode:: mako + + ## base.html + + +
+    <html>
+        <body>
+            <div class="header">
+                <%block name="header"/>
+            </div>
      + + ${self.body()} + + + + + +Here is a breakdown of the execution: + +#. When ``index.html`` is rendered, control immediately passes to + ``base.html``. +#. ``base.html`` then renders the top part of an HTML document, + then invokes the ``<%block name="header">`` block. It invokes the + underlying ``header()`` function off of a built-in namespace + called ``self`` (this namespace was first introduced in the + :doc:`Namespaces chapter ` in :ref:`namespace_self`). Since + ``index.html`` is the topmost template and also defines a block + called ``header``, it's this ``header`` block that ultimately gets + executed -- instead of the one that's present in ``base.html``. +#. Control comes back to ``base.html``. Some more HTML is + rendered. +#. ``base.html`` executes ``self.body()``. The ``body()`` + function on all template-based namespaces refers to the main + body of the template, therefore the main body of + ``index.html`` is rendered. +#. When ``<%block name="header">`` is encountered in ``index.html`` + during the ``self.body()`` call, a conditional is checked -- does the + current inherited template, i.e. ``base.html``, also define this block? If yes, + the ``<%block>`` is **not** executed here -- the inheritance + mechanism knows that the parent template is responsible for rendering + this block (and in fact it already has). In other words a block + only renders in its *basemost scope*. +#. Control comes back to ``base.html``. More HTML is rendered, + then the ``<%block name="footer">`` expression is invoked. +#. The ``footer`` block is only defined in ``base.html``, so being + the topmost definition of ``footer``, it's the one that + executes. If ``index.html`` also specified ``footer``, then + its version would **override** that of the base. +#. ``base.html`` finishes up rendering its HTML and the template + is complete, producing: + + .. sourcecode:: html + + + +
+    <html>
+        <body>
+            <div class="header">
+                this is some header content
+            </div>
      + + this is the body content. + + + + + +...and that is template inheritance in a nutshell. The main idea +is that the methods that you call upon ``self`` always +correspond to the topmost definition of that method. Very much +the way ``self`` works in a Python class, even though Mako is +not actually using Python class inheritance to implement this +functionality. (Mako doesn't take the "inheritance" metaphor too +seriously; while useful to setup some commonly recognized +semantics, a textual template is not very much like an +object-oriented class construct in practice). + +Nesting Blocks +============== + +The named blocks defined in an inherited template can also be nested within +other blocks. The name given to each block is globally accessible via any inheriting +template. We can add a new block ``title`` to our ``header`` block: + +.. sourcecode:: mako + + ## base.html + + +
+    <html>
+        <body>
+            <div class="header">
+                <%block name="header">
+                    <h2>
+                        <%block name="title"/>
+                    </h2>
+                </%block>
+            </div>
      + + ${self.body()} + + + + + +The inheriting template can name either or both of ``header`` and ``title``, separately +or nested themselves: + +.. sourcecode:: mako + + ## index.html + <%inherit file="base.html"/> + + <%block name="header"> + this is some header content + ${parent.header()} + + + <%block name="title"> + this is the title + + + this is the body content. + +Note when we overrode ``header``, we added an extra call ``${parent.header()}`` in order to invoke +the parent's ``header`` block in addition to our own. That's described in more detail below, +in :ref:`parent_namespace`. + +Rendering a Named Block Multiple Times +====================================== + +Recall from the section :ref:`blocks` that a named block is just like a ``<%def>``, +with some different usage rules. We can call one of our named sections distinctly, for example +a section that is used more than once, such as the title of a page: + +.. sourcecode:: mako + + + + ${self.title()} + + + <%block name="header"> +

+        <h2><%block name="title"/></h2>
+    </%block>
      + + ${self.body()} + + + +Where above an inheriting template can define ``<%block name="title">`` just once, and it will be +used in the base template both in the ```` section as well as the ``<h2>``. + +But what about Defs? +==================== + +The previous example used the ``<%block>`` tag to produce areas of content +to be overridden. Before Mako 0.4.1, there wasn't any such tag -- instead +there was only the ``<%def>`` tag. As it turns out, named blocks and defs are +largely interchangeable. The def simply doesn't call itself automatically, +and has more open-ended naming and scoping rules that are more flexible and similar +to Python itself, but less suited towards layout. The first example from +this chapter using defs would look like: + +.. sourcecode:: mako + + ## index.html + <%inherit file="base.html"/> + + <%def name="header()"> + this is some header content + </%def> + + this is the body content. + +And ``base.html``, the inherited template: + +.. sourcecode:: mako + + ## base.html + <html> + <body> + <div class="header"> + ${self.header()} + </div> + + ${self.body()} + + <div class="footer"> + ${self.footer()} + </div> + </body> + </html> + + <%def name="header()"/> + <%def name="footer()"> + this is the footer + </%def> + +Above, we illustrate that defs differ from blocks in that their definition +and invocation are defined in two separate places, instead of at once. You can *almost* do exactly what a +block does if you put the two together: + +.. sourcecode:: mako + + <div class="header"> + <%def name="header()"></%def>${self.header()} + </div> + +The ``<%block>`` is obviously more streamlined than the ``<%def>`` for this kind +of usage. In addition, +the above "inline" approach with ``<%def>`` does not work with nesting: + +.. sourcecode:: mako + + <head> + <%def name="header()"> + <title> + ## this won't work ! + <%def name="title()">default title</%def>${self.title()} + + ${self.header()} + + +Where above, the ``title()`` def, because it's a def within a def, is not part of the +template's exported namespace and will not be part of ``self``. If the inherited template +did define its own ``title`` def at the top level, it would be called, but the "default title" +above is not present at all on ``self`` no matter what. For this to work as expected +you'd instead need to say: + +.. sourcecode:: mako + + + <%def name="header()"> + + ${self.title()} + + ${self.header()} + + <%def name="title()"/> + + +That is, ``title`` is defined outside of any other defs so that it is in the ``self`` namespace. +It works, but the definition needs to be potentially far away from the point of render. + +A named block is always placed in the ``self`` namespace, regardless of nesting, +so this restriction is lifted: + +.. sourcecode:: mako + + ## base.html + + <%block name="header"> + + <%block name="title"/> + + + + +The above template defines ``title`` inside of ``header``, and an inheriting template can define +one or both in **any** configuration, nested inside each other or not, in order for them to be used: + +.. sourcecode:: mako + + ## index.html + <%inherit file="base.html"/> + <%block name="title"> + the title + + <%block name="header"> + the header + + +So while the ``<%block>`` tag lifts the restriction of nested blocks not being available externally, +in order to achieve this it *adds* the restriction that all block names in a single template need +to be globally unique within the template, and additionally that a ``<%block>`` can't be defined +inside of a ``<%def>``. 
It's a more restricted tag suited towards a more specific use case than ``<%def>``. + +Using the ``next`` Namespace to Produce Content Wrapping +======================================================== + +Sometimes you have an inheritance chain that spans more than two +templates. Or maybe you don't, but you'd like to build your +system such that extra inherited templates can be inserted in +the middle of a chain where they would be smoothly integrated. +If each template wants to define its layout just within its main +body, you can't just call ``self.body()`` to get at the +inheriting template's body, since that is only the topmost body. +To get at the body of the *next* template, you call upon the +namespace ``next``, which is the namespace of the template +**immediately following** the current template. + +Lets change the line in ``base.html`` which calls upon +``self.body()`` to instead call upon ``next.body()``: + +.. sourcecode:: mako + + ## base.html + + +
+    <html>
+        <body>
+            <div class="header">
+                <%block name="header"/>
+            </div>
+
+            ${next.body()}
+
+            <div class="footer">
+                <%block name="footer">
+                    this is the footer
+                </%block>
+            </div>
+        </body>
+    </html>
+
+Let's also add an intermediate template called ``layout.html``,
+which inherits from ``base.html``:
+
+.. sourcecode:: mako
+
+    ## layout.html
+    <%inherit file="base.html"/>
+    <div class="mainlayout">
+        <%block name="toolbar">
+            <ul>
+                <li>selection 1</li>
+                <li>selection 2</li>
+                <li>selection 3</li>
+            </ul>
+        </%block>
+
+        ${next.body()}
+    </div>
      + +And finally change ``index.html`` to inherit from +``layout.html`` instead: + +.. sourcecode:: mako + + ## index.html + <%inherit file="layout.html"/> + + ## .. rest of template + +In this setup, each call to ``next.body()`` will render the body +of the next template in the inheritance chain (which can be +written as ``base.html -> layout.html -> index.html``). Control +is still first passed to the bottommost template ``base.html``, +and ``self`` still references the topmost definition of any +particular def. + +The output we get would be: + +.. sourcecode:: html + + + +
+    <html>
+        <body>
+            <div class="header">
+                this is some header content
+            </div>
+
+            <div class="mainlayout">
+                <ul>
+                    <li>selection 1</li>
+                    <li>selection 2</li>
+                    <li>selection 3</li>
+                </ul>
+
+                this is the body content.
+            </div>
+
+            <div class="footer">
+                this is the footer
+            </div>
+        </body>
+    </html>
      + + + + + +So above, we have the ````, ```` and +``header``/``footer`` layout of ``base.html``, we have the +``
        `` and ``mainlayout`` section of ``layout.html``, and the +main body of ``index.html`` as well as its overridden ``header`` +def. The ``layout.html`` template is inserted into the middle of +the chain without ``base.html`` having to change anything. +Without the ``next`` namespace, only the main body of +``index.html`` could be used; there would be no way to call +``layout.html``'s body content. + +.. _parent_namespace: + +Using the ``parent`` Namespace to Augment Defs +============================================== + +Lets now look at the other inheritance-specific namespace, the +opposite of ``next`` called ``parent``. ``parent`` is the +namespace of the template **immediately preceding** the current +template. What's useful about this namespace is that +defs or blocks can call upon their overridden versions. +This is not as hard as it sounds and +is very much like using the ``super`` keyword in Python. Lets +modify ``index.html`` to augment the list of selections provided +by the ``toolbar`` function in ``layout.html``: + +.. sourcecode:: mako + + ## index.html + <%inherit file="layout.html"/> + + <%block name="header"> + this is some header content + + + <%block name="toolbar"> + ## call the parent's toolbar first + ${parent.toolbar()} +
+        <li>selection 4</li>
+        <li>selection 5</li>
+    </%block>
+
+    this is the body content.
+
+Above, we implemented a ``toolbar()`` function, which is meant
+to override the definition of ``toolbar`` within the inherited
+template ``layout.html``. However, since we want the content
+from that of ``layout.html`` as well, we call it via the
+``parent`` namespace whenever we want its content, in this case
+before we add our own selections. So the output for the whole
+thing is now:
+
+.. sourcecode:: html
+
+    <html>
+        <body>
+            <div class="header">
+                this is some header content
+            </div>
+
+            <div class="mainlayout">
+                <ul>
+                    <li>selection 1</li>
+                    <li>selection 2</li>
+                    <li>selection 3</li>
+                    <li>selection 4</li>
+                    <li>selection 5</li>
+                </ul>
+
+                this is the body content.
+            </div>
+
+            <div class="footer">
+                this is the footer
+            </div>
+        </body>
+    </html>
        + + + + + +and you're now a template inheritance ninja! + +Inheritable Attributes +====================== + +The :attr:`attr <.Namespace.attr>` accessor of the :class:`.Namespace` object +allows access to module level variables declared in a template. By accessing +``self.attr``, you can access regular attributes from the +inheritance chain as declared in ``<%! %>`` sections. Such as: + +.. sourcecode:: mako + + <%! + class_ = "grey" + %> + +
+    <div class="${self.attr.class_}">
+        ${self.body()}
+    </div>
        + +If an inheriting template overrides ``class_`` to be +``"white"``, as in: + +.. sourcecode:: mako + + <%! + class_ = "white" + %> + <%inherit file="parent.html"/> + + This is the body + +you'll get output like: + +.. sourcecode:: html + +
+    <div class="white">
+        This is the body
+    </div>
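+As a quick illustration of the ``attr`` accessor from the Python side --
+a deliberately simplified sketch that collapses the two templates above
+into a single one and assumes only that Mako is importable:
+
+.. sourcecode:: python
+
+    from mako.template import Template
+
+    t = Template("""\
+    <%!
+        class_ = "grey"
+    %>
+    <div class="${self.attr.class_}">hello</div>
+    """)
+
+    # the module-level ``class_`` declared in <%! %> is reachable via self.attr
+    print(t.render())   # renders the div with class="grey"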
        + diff --git a/lib3/Mako-0.7.3/doc/build/namespaces.rst b/lib3/Mako-0.7.3/doc/build/namespaces.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/namespaces.rst @@ -0,0 +1,349 @@ +.. _namespaces_toplevel: + +========== +Namespaces +========== + +Namespaces are used to organize groups of defs into +categories, and also to "import" defs from other files. + +If the file ``components.html`` defines these two defs: + +.. sourcecode:: mako + + ## components.html + <%def name="comp1()"> + this is comp1 + + + <%def name="comp2(x)"> + this is comp2, x is ${x} + + +you can make another file, for example ``index.html``, that +pulls those two defs into a namespace called ``comp``: + +.. sourcecode:: mako + + ## index.html + <%namespace name="comp" file="components.html"/> + + Here's comp1: ${comp.comp1()} + Here's comp2: ${comp.comp2(x=5)} + +The ``comp`` variable above is an instance of +:class:`.Namespace`, a **proxy object** which delivers +method calls to the underlying template callable using the +current context. + +``<%namespace>`` also provides an ``import`` attribute which can +be used to pull the names into the local namespace, removing the +need to call it via the "``.``" operator. When ``import`` is used, the +``name`` attribute is optional. + +.. sourcecode:: mako + + <%namespace file="components.html" import="comp1, comp2"/> + + Heres comp1: ${comp1()} + Heres comp2: ${comp2(x=5)} + +``import`` also supports the "``*``" operator: + +.. sourcecode:: mako + + <%namespace file="components.html" import="*"/> + + Heres comp1: ${comp1()} + Heres comp2: ${comp2(x=5)} + +The names imported by the ``import`` attribute take precedence +over any names that exist within the current context. + +.. note:: In current versions of Mako, usage of ``import='*'`` is + known to decrease performance of the template. This will be + fixed in a future release. + +The ``file`` argument allows expressions -- if looking for +context variables, the ``context`` must be named explicitly: + +.. sourcecode:: mako + + <%namespace name="dyn" file="${context['namespace_name']}"/> + +Ways to Call Namespaces +======================= + +There are essentially four ways to call a function from a +namespace. + +The "expression" format, as described previously. Namespaces are +just Python objects with functions on them, and can be used in +expressions like any other function: + +.. sourcecode:: mako + + ${mynamespace.somefunction('some arg1', 'some arg2', arg3='some arg3', arg4='some arg4')} + +Synonymous with the "expression" format is the "custom tag" +format, when a "closed" tag is used. This format, introduced in +Mako 0.2.3, allows the usage of a "custom" Mako tag, with the +function arguments passed in using named attributes: + +.. sourcecode:: mako + + <%mynamespace:somefunction arg1="some arg1" arg2="some arg2" arg3="some arg3" arg4="some arg4"/> + +When using tags, the values of the arguments are taken as +literal strings by default. To embed Python expressions as +arguments, use the embedded expression format: + +.. sourcecode:: mako + + <%mynamespace:somefunction arg1="${someobject.format()}" arg2="${somedef(5, 12)}"/> + +The "custom tag" format is intended mainly for namespace +functions which recognize body content, which in Mako is known +as a "def with embedded content": + +.. sourcecode:: mako + + <%mynamespace:somefunction arg1="some argument" args="x, y"> + Some record: ${x}, ${y} + + +The "classic" way to call defs with embedded content is the ``<%call>`` tag: + +.. 
sourcecode:: mako + + <%call expr="mynamespace.somefunction(arg1='some argument')" args="x, y"> + Some record: ${x}, ${y} + + +For information on how to construct defs that embed content from +the caller, see :ref:`defs_with_content`. + +.. _namespaces_python_modules: + +Namespaces from Regular Python Modules +====================================== + +Namespaces can also import regular Python functions from +modules. These callables need to take at least one argument, +``context``, an instance of :class:`.Context`. A module file +``some/module.py`` might contain the callable: + +.. sourcecode:: python + + def my_tag(context): + context.write("hello world") + return '' + +A template can use this module via: + +.. sourcecode:: mako + + <%namespace name="hw" module="some.module"/> + + ${hw.my_tag()} + +Note that the ``context`` argument is not needed in the call; +the :class:`.Namespace` tag creates a locally-scoped callable which +takes care of it. The ``return ''`` is so that the def does not +dump a ``None`` into the output stream -- the return value of any +def is rendered after the def completes, in addition to whatever +was passed to :meth:`.Context.write` within its body. + +If your def is to be called in an "embedded content" context, +that is as described in :ref:`defs_with_content`, you should use +the :func:`.supports_caller` decorator, which will ensure that Mako +will ensure the correct "caller" variable is available when your +def is called, supporting embedded content: + +.. sourcecode:: python + + from mako.runtime import supports_caller + + @supports_caller + def my_tag(context): + context.write("
        ") + context['caller'].body() + context.write("
        ") + return '' + +Capturing of output is available as well, using the +outside-of-templates version of the :func:`.capture` function, +which accepts the "context" as its first argument: + +.. sourcecode:: python + + from mako.runtime import supports_caller, capture + + @supports_caller + def my_tag(context): + return "
        %s
        " % \ + capture(context, context['caller'].body, x="foo", y="bar") + +Declaring Defs in Namespaces +============================ + +The ``<%namespace>`` tag supports the definition of ``<%def>``\ s +directly inside the tag. These defs become part of the namespace +like any other function, and will override the definitions +pulled in from a remote template or module: + +.. sourcecode:: mako + + ## define a namespace + <%namespace name="stuff"> + <%def name="comp1()"> + comp1 + + + + ## then call it + ${stuff.comp1()} + +.. _namespaces_body: + +The ``body()`` Method +===================== + +Every namespace that is generated from a template contains a +method called ``body()``. This method corresponds to the main +body of the template, and plays its most important roles when +using inheritance relationships as well as +def-calls-with-content. + +Since the ``body()`` method is available from a namespace just +like all the other defs defined in a template, what happens if +you send arguments to it? By default, the ``body()`` method +accepts no positional arguments, and for usefulness in +inheritance scenarios will by default dump all keyword arguments +into a dictionary called ``pageargs``. But if you actually want +to get at the keyword arguments, Mako recommends you define your +own argument signature explicitly. You do this via using the +``<%page>`` tag: + +.. sourcecode:: mako + + <%page args="x, y, someval=8, scope='foo', **kwargs"/> + +A template which defines the above signature requires that the +variables ``x`` and ``y`` are defined, defines default values +for ``someval`` and ``scope``, and sets up ``**kwargs`` to +receive all other keyword arguments. If ``**kwargs`` or similar +is not present, the argument ``**pageargs`` gets tacked on by +Mako. When the template is called as a top-level template (i.e. +via :meth:`~.Template.render`) or via the ``<%include>`` tag, the +values for these arguments will be pulled from the ``Context``. +In all other cases, i.e. via calling the ``body()`` method, the +arguments are taken as ordinary arguments from the method call. +So above, the body might be called as: + +.. sourcecode:: mako + + ${self.body(5, y=10, someval=15, delta=7)} + +The :class:`.Context` object also supplies a :attr:`~.Context.kwargs` accessor, for +cases when you'd like to pass along whatever is in the context to +a ``body()`` callable: + +.. sourcecode:: mako + + ${next.body(**context.kwargs)} + +The usefulness of calls like the above become more apparent when +one works with inheriting templates. For more information on +this, as well as the meanings of the names ``self`` and +``next``, see :ref:`inheritance_toplevel`. + +.. _namespaces_builtin: + +Built-in Namespaces +=================== + +The namespace is so great that Mako gives your template one (or +two) for free. The names of these namespaces are ``local`` and +``self``. Other built-in namespaces include ``parent`` and +``next``, which are optional and are described in +:ref:`inheritance_toplevel`. + +.. _namespace_local: + +``local`` +--------- + +The ``local`` namespace is basically the namespace for the +currently executing template. This means that all of the top +level defs defined in your template, as well as your template's +``body()`` function, are also available off of the ``local`` +namespace. + +The ``local`` namespace is also where properties like ``uri``, +``filename``, and ``module`` and the ``get_namespace`` method +can be particularly useful. + +.. 
_namespace_self: + +``self`` +-------- + +The ``self`` namespace, in the case of a template that does not +use inheritance, is synonymous with ``local``. If inheritance is +used, then ``self`` references the topmost template in the +inheritance chain, where it is most useful for providing the +ultimate form of various "method" calls which may have been +overridden at various points in an inheritance chain. See +:ref:`inheritance_toplevel`. + +Inheritable Namespaces +====================== + +The ``<%namespace>`` tag includes an optional attribute +``inheritable="True"``, which will cause the namespace to be +attached to the ``self`` namespace. Since ``self`` is globally +available throughout an inheritance chain (described in the next +section), all the templates in an inheritance chain can get at +the namespace imported in a super-template via ``self``. + +.. sourcecode:: mako + + ## base.html + <%namespace name="foo" file="foo.html" inheritable="True"/> + + ${next.body()} + + ## somefile.html + <%inherit file="base.html"/> + + ${self.foo.bar()} + +This allows a super-template to load a whole bunch of namespaces +that its inheriting templates can get to, without them having to +explicitly load those namespaces themselves. + +The ``import="*"`` part of the ``<%namespace>`` tag doesn't yet +interact with the ``inheritable`` flag, so currently you have to +use the explicit namespace name off of ``self``, followed by the +desired function name. But more on this in a future release. + +API Reference +============= + +.. autoclass:: mako.runtime.Namespace + :show-inheritance: + :members: + +.. autoclass:: mako.runtime.TemplateNamespace + :show-inheritance: + :members: + +.. autoclass:: mako.runtime.ModuleNamespace + :show-inheritance: + :members: + +.. autofunction:: mako.runtime.supports_caller + +.. autofunction:: mako.runtime.capture + diff --git a/lib3/Mako-0.7.3/doc/build/runtime.rst b/lib3/Mako-0.7.3/doc/build/runtime.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/runtime.rst @@ -0,0 +1,448 @@ +.. _runtime_toplevel: + +============================ +The Mako Runtime Environment +============================ + +This section describes a little bit about the objects and +built-in functions that are available in templates. + +.. _context: + +Context +======= + +The :class:`.Context` is the central object that is created when +a template is first executed, and is responsible for handling +all communication with the outside world. Within the template +environment, it is available via the :ref:`reserved name ` +``context``. The :class:`.Context` includes two +major components, one of which is the output buffer, which is a +file-like object such as Python's ``StringIO`` or similar, and +the other a dictionary of variables that can be freely +referenced within a template; this dictionary is a combination +of the arguments sent to the :meth:`~.Template.render` function and +some built-in variables provided by Mako's runtime environment. + +The Buffer +---------- + +The buffer is stored within the :class:`.Context`, and writing +to it is achieved by calling the :meth:`~.Context.write` method +-- in a template this looks like ``context.write('some string')``. +You usually don't need to care about this, as all text within a template, as +well as all expressions provided by ``${}``, automatically send +everything to this method. 
The cases you might want to be aware +of its existence are if you are dealing with various +filtering/buffering scenarios, which are described in +:ref:`filtering_toplevel`, or if you want to programmatically +send content to the output stream, such as within a ``<% %>`` +block. + +.. sourcecode:: mako + + <% + context.write("some programmatic text") + %> + +The actual buffer may or may not be the original buffer sent to +the :class:`.Context` object, as various filtering/caching +scenarios may "push" a new buffer onto the context's underlying +buffer stack. For this reason, just stick with +``context.write()`` and content will always go to the topmost +buffer. + +.. _context_vars: + +Context Variables +----------------- + +When your template is compiled into a Python module, the body +content is enclosed within a Python function called +``render_body``. Other top-level defs defined in the template are +defined within their own function bodies which are named after +the def's name with the prefix ``render_`` (i.e. ``render_mydef``). +One of the first things that happens within these functions is +that all variable names that are referenced within the function +which are not defined in some other way (i.e. such as via +assignment, module level imports, etc.) are pulled from the +:class:`.Context` object's dictionary of variables. This is how you're +able to freely reference variable names in a template which +automatically correspond to what was passed into the current +:class:`.Context`. + +* **What happens if I reference a variable name that is not in + the current context?** - The value you get back is a special + value called ``UNDEFINED``, or if the ``strict_undefined=True`` flag + is used a ``NameError`` is raised. ``UNDEFINED`` is just a simple global + variable with the class :class:`mako.runtime.Undefined`. The + ``UNDEFINED`` object throws an error when you call ``str()`` on + it, which is what happens if you try to use it in an + expression. +* **UNDEFINED makes it hard for me to find what name is missing** - An alternative + is to specify the option ``strict_undefined=True`` + to the :class:`.Template` or :class:`.TemplateLookup`. This will cause + any non-present variables to raise an immediate ``NameError`` + which includes the name of the variable in its message + when :meth:`~.Template.render` is called -- ``UNDEFINED`` is not used. + + .. versionadded:: 0.3.6 + +* **Why not just return None?** Using ``UNDEFINED``, or + raising a ``NameError`` is more + explicit and allows differentiation between a value of ``None`` + that was explicitly passed to the :class:`.Context` and a value that + wasn't present at all. +* **Why raise an exception when you call str() on it ? Why not + just return a blank string?** - Mako tries to stick to the + Python philosophy of "explicit is better than implicit". In + this case, it's decided that the template author should be made + to specifically handle a missing value rather than + experiencing what may be a silent failure. Since ``UNDEFINED`` + is a singleton object just like Python's ``True`` or ``False``, + you can use the ``is`` operator to check for it: + + .. sourcecode:: mako + + % if someval is UNDEFINED: + someval is: no value + % else: + someval is: ${someval} + % endif + +Another facet of the :class:`.Context` is that its dictionary of +variables is **immutable**. Whatever is set when +:meth:`~.Template.render` is called is what stays. 
Of course, since +its Python, you can hack around this and change values in the +context's internal dictionary, but this will probably will not +work as well as you'd think. The reason for this is that Mako in +many cases creates copies of the :class:`.Context` object, which +get sent to various elements of the template and inheriting +templates used in an execution. So changing the value in your +local :class:`.Context` will not necessarily make that value +available in other parts of the template's execution. Examples +of where Mako creates copies of the :class:`.Context` include +within top-level def calls from the main body of the template +(the context is used to propagate locally assigned variables +into the scope of defs; since in the template's body they appear +as inlined functions, Mako tries to make them act that way), and +within an inheritance chain (each template in an inheritance +chain has a different notion of ``parent`` and ``next``, which +are all stored in unique :class:`.Context` instances). + +* **So what if I want to set values that are global to everyone + within a template request?** - All you have to do is provide a + dictionary to your :class:`.Context` when the template first + runs, and everyone can just get/set variables from that. Lets + say its called ``attributes``. + + Running the template looks like: + + .. sourcecode:: python + + output = template.render(attributes={}) + + Within a template, just reference the dictionary: + + .. sourcecode:: mako + + <% + attributes['foo'] = 'bar' + %> + 'foo' attribute is: ${attributes['foo']} + +* **Why can't "attributes" be a built-in feature of the + Context?** - This is an area where Mako is trying to make as + few decisions about your application as it possibly can. + Perhaps you don't want your templates to use this technique of + assigning and sharing data, or perhaps you have a different + notion of the names and kinds of data structures that should + be passed around. Once again Mako would rather ask the user to + be explicit. + +Context Methods and Accessors +----------------------------- + +Significant members of :class:`.Context` include: + +* ``context[key]`` / ``context.get(key, default=None)`` - + dictionary-like accessors for the context. Normally, any + variable you use in your template is automatically pulled from + the context if it isn't defined somewhere already. Use the + dictionary accessor and/or ``get`` method when you want a + variable that *is* already defined somewhere else, such as in + the local arguments sent to a ``%def`` call. If a key is not + present, like a dictionary it raises ``KeyError``. +* ``keys()`` - all the names defined within this context. +* ``kwargs`` - this returns a **copy** of the context's + dictionary of variables. This is useful when you want to + propagate the variables in the current context to a function + as keyword arguments, i.e.: + + .. sourcecode:: mako + + ${next.body(**context.kwargs)} + +* ``write(text)`` - write some text to the current output + stream. +* ``lookup`` - returns the :class:`.TemplateLookup` instance that is + used for all file-lookups within the current execution (even + though individual :class:`.Template` instances can conceivably have + different instances of a :class:`.TemplateLookup`, only the + :class:`.TemplateLookup` of the originally-called :class:`.Template` gets + used in a particular execution). + +.. _loop_context: + +The Loop Context +================ + +Within ``% for`` blocks, the :ref:`reserved name` ``loop`` +is available. 
``loop`` tracks the progress of +the ``for`` loop and makes it easy to use the iteration state to control +template behavior: + +.. sourcecode:: mako + +
+    <ul>
+    % for a in ("one", "two", "three"):
+        <li>Item ${loop.index}: ${a}</li>
+    % endfor
+    </ul>
        + +.. versionadded:: 0.7 + +Iterations +---------- + +Regardless of the type of iterable you're looping over, ``loop`` always tracks +the 0-indexed iteration count (available at ``loop.index``), its parity +(through the ``loop.even`` and ``loop.odd`` bools), and ``loop.first``, a bool +indicating whether the loop is on its first iteration. If your iterable +provides a ``__len__`` method, ``loop`` also provides access to +a count of iterations remaining at ``loop.reverse_index`` and ``loop.last``, +a bool indicating whether the loop is on its last iteration; accessing these +without ``__len__`` will raise a ``TypeError``. + +Cycling +------- + +Cycling is available regardless of whether the iterable you're using provides +a ``__len__`` method. Prior to Mako 0.7, you might have generated a simple +zebra striped list using ``enumerate``: + +.. sourcecode:: mako + +
+    <ul>
+    % for i, item in enumerate(('spam', 'ham', 'eggs')):
+        <li class="${'odd' if i % 2 else 'even'}">${item}</li>
+    % endfor
+    </ul>
        + +With ``loop.cycle``, you get the same results with cleaner code and less prep work: + +.. sourcecode:: mako + +
+    <ul>
+    % for item in ('spam', 'ham', 'eggs'):
+        <li class="${loop.cycle('even', 'odd')}">${item}</li>
+    % endfor
+    </ul>
        + +Both approaches produce output like the following: + +.. sourcecode:: html + +
+    <ul>
+        <li class="even">spam</li>
+        <li class="odd">ham</li>
+        <li class="even">eggs</li>
+    </ul>
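+The same ``loop`` attributes are available when driving a template from
+Python code. A minimal sketch, assuming only that Mako is importable (the
+item values are invented for the example):
+
+.. sourcecode:: python
+
+    from mako.template import Template
+
+    t = Template(
+        "% for item in ('spam', 'ham', 'eggs'):\n"
+        "${loop.index} ${loop.cycle('even', 'odd')} ${item}\n"
+        "% endfor\n"
+    )
+    print(t.render())
+    # 0 even spam
+    # 1 odd ham
+    # 2 even eggs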
        + +Parent Loops +------------ + +Loop contexts can also be transparently nested, and the Mako runtime will do +the right thing and manage the scope for you. You can access the parent loop +context through ``loop.parent``. + +This allows you to reach all the way back up through the loop stack by +chaining ``parent`` attribute accesses, i.e. ``loop.parent.parent....`` as +long as the stack depth isn't exceeded. For example, you can use the parent +loop to make a checkered table: + +.. sourcecode:: mako + +
+    <table>
+    % for consonant in 'pbj':
+        <tr>
+        % for vowel in 'iou':
+            <td class="${'black' if (loop.parent.even == loop.even) else 'red'}">
+                ${consonant + vowel}t
+            </td>
+        % endfor
+        </tr>
+    % endfor
+    </table>
+
+.. sourcecode:: html
+
+    <table>
+        <tr>
+            <td class="black">pit</td>
+            <td class="red">pot</td>
+            <td class="black">put</td>
+        </tr>
+        <tr>
+            <td class="red">bit</td>
+            <td class="black">bot</td>
+            <td class="red">but</td>
+        </tr>
+        <tr>
+            <td class="black">jit</td>
+            <td class="red">jot</td>
+            <td class="black">jut</td>
+        </tr>
+    </table>
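+``loop.parent`` can also be exercised directly from Python. The following
+is only an illustrative sketch (it assumes Mako 0.7 or later and prints a
+plain-text grid rather than the HTML table above):
+
+.. sourcecode:: python
+
+    from mako.template import Template
+
+    t = Template(
+        "% for row in range(2):\n"
+        "% for col in range(3):\n"
+        "${loop.parent.index}.${loop.index} \\\n"
+        "% endfor\n"
+        "\n"
+        "% endfor\n"
+    )
+    print(t.render())
+    # 0.0 0.1 0.2
+    # 1.0 1.1 1.2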
      + +.. _migrating_loop: + +Migrating Legacy Templates that Use the Word "loop" +--------------------------------------------------- + +.. versionchanged:: 0.7 + The ``loop`` name is now :ref:`reserved ` in Mako, + which means a template that refers to a variable named ``loop`` + won't function correctly when used in Mako 0.7. + +To ease the transition for such systems, the feature can be disabled across the board for +all templates, then re-enabled on a per-template basis for those templates which wish +to make use of the new system. + +First, the ``enable_loop=False`` flag is passed to either the :class:`.TemplateLookup` +or :class:`.Template` object in use: + +.. sourcecode:: python + + lookup = TemplateLookup(directories=['/docs'], enable_loop=False) + +or: + +.. sourcecode:: python + + template = Template("some template", enable_loop=False) + +An individual template can make usage of the feature when ``enable_loop`` is set to +``False`` by switching it back on within the ``<%page>`` tag: + +.. sourcecode:: mako + + <%page enable_loop="True"/> + + % for i in collection: + ${i} ${loop.index} + % endfor + +Using the above scheme, it's safe to pass the name ``loop`` to the :meth:`.Template.render` +method as well as to freely make usage of a variable named ``loop`` within a template, provided +the ``<%page>`` tag doesn't override it. New templates that want to use the ``loop`` context +can then set up ``<%page enable_loop="True"/>`` to use the new feature without affecting +old templates. + +All the Built-in Names +====================== + +A one-stop shop for all the names Mako defines. Most of these +names are instances of :class:`.Namespace`, which are described +in the next section, :ref:`namespaces_toplevel`. Also, most of +these names other than ``context``, ``UNDEFINED``, and ``loop`` are +also present *within* the :class:`.Context` itself. The names +``context``, ``loop`` and ``UNDEFINED`` themselves can't be passed +to the context and can't be substituted -- see the section :ref:`reserved_names`. + +* ``context`` - this is the :class:`.Context` object, introduced + at :ref:`context`. +* ``local`` - the namespace of the current template, described + in :ref:`namespaces_builtin`. +* ``self`` - the namespace of the topmost template in an + inheritance chain (if any, otherwise the same as ``local``), + mostly described in :ref:`inheritance_toplevel`. +* ``parent`` - the namespace of the parent template in an + inheritance chain (otherwise undefined); see + :ref:`inheritance_toplevel`. +* ``next`` - the namespace of the next template in an + inheritance chain (otherwise undefined); see + :ref:`inheritance_toplevel`. +* ``caller`` - a "mini" namespace created when using the + ``<%call>`` tag to define a "def call with content"; described + in :ref:`defs_with_content`. +* ``loop`` - this provides access to :class:`.LoopContext` objects when + they are requested within ``% for`` loops, introduced at :ref:`loop_context`. +* ``capture`` - a function that calls a given def and captures + its resulting content into a string, which is returned. Usage + is described in :ref:`filtering_toplevel`. +* ``UNDEFINED`` - a global singleton that is applied to all + otherwise uninitialized template variables that were not + located within the :class:`.Context` when rendering began, + unless the :class:`.Template` flag ``strict_undefined`` + is set to ``True``. ``UNDEFINED`` is + an instance of :class:`.Undefined`, and raises an + exception when its ``__str__()`` method is called. 
+* ``pageargs`` - this is a dictionary which is present in a + template which does not define any ``**kwargs`` section in its + ``<%page>`` tag. All keyword arguments sent to the ``body()`` + function of a template (when used via namespaces) go here by + default unless otherwise defined as a page argument. If this + makes no sense, it shouldn't; read the section + :ref:`namespaces_body`. + +.. _reserved_names: + +Reserved Names +-------------- + +Mako has a few names that are considered to be "reserved" and can't be used +as variable names. + +.. versionchanged:: 0.7 + Mako raises an error if these words are found passed to the template + as context arguments, whereas in previous versions they'd be silently + ignored or lead to other error messages. + +* ``context`` - see :ref:`context`. +* ``UNDEFINED`` - see :ref:`context_vars`. +* ``loop`` - see :ref:`loop_context`. Note this can be disabled for legacy templates + via the ``enable_loop=False`` argument; see :ref:`migrating_loop`. + +API Reference +============= + +.. autoclass:: mako.runtime.Context + :show-inheritance: + :members: + +.. autoclass:: mako.runtime.LoopContext + :show-inheritance: + :members: + +.. autoclass:: mako.runtime.Undefined + :show-inheritance: + diff --git a/lib3/Mako-0.7.3/doc/build/static/docs.css b/lib3/Mako-0.7.3/doc/build/static/docs.css new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/static/docs.css @@ -0,0 +1,438 @@ +/* global */ + +body { + background-color: #FDFBFC; + margin:38px; + color:#333333; +} + +a { + font-weight:normal; + text-decoration:none; +} + +form { + display:inline; +} + +/* hyperlinks */ + +a:link, a:visited, a:active { + color:#0000FF; +} +a:hover { + color:#700000; + text-decoration:underline; +} + +/* paragraph links after sections. 
+ These aren't visible until hovering + over the tag, then have a + "reverse video" effect over the actual + link + */ + +a.headerlink { + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +a.headerlink:hover { + background-color: #990000; + color: white; +} + + +/* Container setup */ + +#docs-container { + max-width:1000px; +} + + +/* header/footer elements */ + +#docs-header h1 { + font-size:20px; + color: #222222; + margin: 0; + padding: 0; +} + +#docs-header { + font-family:Tahoma, Geneva,sans-serif; + + font-size:.9em; + +} + +#docs-top-navigation, +#docs-bottom-navigation { + font-family: Tahoma, Geneva, sans-serif; + background-color: #EEE; + border: solid 1px #CCC; + padding:10px; + font-size:.9em; +} + +#docs-top-navigation { + margin:10px 0px 10px 0px; + line-height:1.2em; +} + +.docs-navigation-links { + font-family:Tahoma, Geneva,sans-serif; +} + +#docs-bottom-navigation { + float:right; + margin: 1em 0 1em 5px; +} + +#docs-copyright { + font-size:.85em; + padding:5px 0px; +} + +#docs-header h1, +#docs-top-navigation h1, +#docs-top-navigation h2 { + font-family:Tahoma,Geneva,sans-serif; + font-weight:normal; +} + +#docs-top-navigation h2 { + margin:16px 4px 7px 5px; + font-size:2em; +} + +#docs-search { + float:right; +} + +#docs-top-page-control { + float:right; + width:350px; +} + +#docs-top-page-control ul { + padding:0; + margin:0; +} + +#docs-top-page-control li { + list-style-type:none; + padding:1px 8px; +} + + +#docs-container .version-num { + font-weight: bold; +} + + +/* content container, sidebar */ + +#docs-body-container { + background-color:#EFEFEF; + border: solid 1px #CCC; + +} + +#docs-body, +#docs-sidebar + { + /*font-family: helvetica, arial, sans-serif; + font-size:.9em;*/ + + font-family: Tahoma, Geneva, sans-serif; + /*font-size:.85em;*/ + line-height:1.5em; + +} + +#docs-sidebar > ul { + font-size:.9em; +} + +#docs-sidebar { + float:left; + width:212px; + padding: 10px 0 0 15px; + /*font-size:.85em;*/ +} + +#docs-sidebar h3, #docs-sidebar h4 { + background-color: #DDDDDD; + color: #222222; + font-family: Tahoma, Geneva,sans-serif; + font-size: 1.1em; + font-weight: normal; + margin: 10px 0 0 -15px; + padding: 5px 10px 5px 10px; + text-shadow: 1px 1px 0 white; + width:210px; +} + +#docs-sidebar h3 a, #docs-sidebar h4 a { + color: #222222; +} +#docs-sidebar ul { + margin: 10px 10px 10px 0px; + padding: 0; + list-style: none outside none; +} + + +#docs-sidebar ul ul { + margin-bottom: 0; + margin-top: 0; + list-style: square outside none; + margin-left: 20px; +} + +#docs-body { + background-color:#FFFFFF; + padding:1px 10px 10px 10px; +} + +#docs-body.withsidebar { + margin: 0 0 0 230px; + border-left:3px solid #DFDFDF; +} + +#docs-body h1, +#docs-body h2, +#docs-body h3, +#docs-body h4 { + font-family:Tahoma, Geneva, sans-serif; +} + +#docs-body h1 { + /* hide the

      for each content section. */ + display:none; + font-size:1.8em; +} + +#docs-body h2 { + font-size:1.6em; +} + +#docs-body h3 { + font-size:1.4em; +} + +/* SQL popup, code styles */ + +.highlight { + background:none; +} + +#docs-container pre { + font-size:1.2em; +} + +#docs-container .pre { + font-size:1.1em; +} + +#docs-container pre { + background-color: #f0f0f0; + border: solid 1px #ccc; + box-shadow: 2px 2px 3px #DFDFDF; + padding:10px; + margin: 5px 0px 5px 0px; + overflow:auto; + line-height:1.3em; +} + +.popup_sql, .show_sql +{ + background-color: #FBFBEE; + padding:5px 10px; + margin:10px -5px; + border:1px dashed; +} + +/* the [SQL] links used to display SQL */ +#docs-container .sql_link +{ + font-weight:normal; + font-family: arial, sans-serif; + font-size:.9em; + text-transform: uppercase; + color:#990000; + border:1px solid; + padding:1px 2px 1px 2px; + margin:0px 10px 0px 15px; + float:right; + line-height:1.2em; +} + +#docs-container a.sql_link, +#docs-container .sql_link +{ + text-decoration: none; + padding:1px 2px; +} + +#docs-container a.sql_link:hover { + text-decoration: none; + color:#fff; + border:1px solid #900; + background-color: #900; +} + +/* docutils-specific elements */ + +th.field-name { + text-align:right; +} + +div.note, div.warning, p.deprecated, div.topic { + background-color:#EEFFEF; +} + + +div.admonition, div.topic, p.deprecated, p.versionadded, p.versionchanged { + border:1px solid #CCCCCC; + padding:5px 10px; + font-size:.9em; + box-shadow: 2px 2px 3px #DFDFDF; +} + +div.warning .admonition-title { + color:#FF0000; +} + +div.admonition .admonition-title, div.topic .topic-title { + font-weight:bold; +} + +.viewcode-back, .viewcode-link { + float:right; +} + +dl.function > dt, +dl.attribute > dt, +dl.classmethod > dt, +dl.method > dt, +dl.class > dt, +dl.exception > dt +{ + background-color:#F0F0F0; + margin:25px -10px 10px 10px; + padding: 0px 10px; +} + +p.versionadded span.versionmodified, +p.versionchanged span.versionmodified, +p.deprecated span.versionmodified { + background-color: #F0F0F0; + font-style: italic; +} + +dt:target, span.highlight { + background-color:#FBE54E; +} + +a.headerlink { + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +a.headerlink:hover { + background-color: #00f; + color: white; +} + +.clearboth { + clear:both; +} + +tt.descname { + background-color:transparent; + font-size:1.2em; + font-weight:bold; +} + +tt.descclassname { + background-color:transparent; +} + +tt { + background-color:#ECF0F3; + padding:0 1px; +} + +/* syntax highlighting overrides */ +.k, .kn {color:#0908CE;} +.o {color:#BF0005;} +.go {color:#804049;} + + +/* special "index page" sections + with specific formatting +*/ + +div#sqlalchemy-documentation { + font-size:.95em; +} +div#sqlalchemy-documentation em { + font-style:normal; +} +div#sqlalchemy-documentation .rubric{ + font-size:14px; + background-color:#EEFFEF; + padding:5px; + border:1px solid #BFBFBF; +} +div#sqlalchemy-documentation a, div#sqlalchemy-documentation li { + padding:5px 0px; +} + +div#getting-started { + border-bottom:1px solid; +} + +div#sqlalchemy-documentation div#sqlalchemy-orm { + float:left; + width:48%; +} + +div#sqlalchemy-documentation div#sqlalchemy-core { + float:left; + width:48%; + margin:0; + padding-left:10px; 
+ border-left:1px solid; +} + +div#dialect-documentation { + border-top:1px solid; + /*clear:left;*/ +} diff --git a/lib3/Mako-0.7.3/doc/build/static/makoLogo.png b/lib3/Mako-0.7.3/doc/build/static/makoLogo.png new file mode 100644 index 0000000000000000000000000000000000000000..c43c087eb48ebfc2223b76cf3df2fa7868c2a72b GIT binary patch [stripped] diff --git a/lib3/Mako-0.7.3/doc/build/static/site.css b/lib3/Mako-0.7.3/doc/build/static/site.css new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/static/site.css @@ -0,0 +1,86 @@ +body { + font-family: Tahoma, Geneva, sans-serif; + line-height:1.4em; + margin:15px; + background-color:#FFFFFF; +} +img {border:none;} +a { text-decoration: none;} +a:visited { color: #2929ff;} +a:hover { color: #0000ff;} + +#wrap { + margin:0 auto; + max-width:1024px; + min-width:480px; + position:relative; + +} +h1 { + font-size:1.6em; + font-weight:bold; +} + +h2 { + font-size:1.1em; + font-weight:bold; + margin:10px 0px 10px 0px; +} + +.clearfix{ + clear:both; +} + +.red { + font-weight:bold; + color:#FF0000; +} +.rightbar { + float:right; +} +.slogan { + margin-top:10px; +} +#gittip_nav { + float:right; + margin:10px 0px 0px 0px; +} + +.toolbar { + margin-top:20px; +} +.copyright { + font-size:.8em; + text-align:center; + color:909090; +} +.pylogo { + text-align:right; + float:right; +} +.code { + font-family:monospace; +} + +li { + margin:1px 0px 1px 0px; +} + +.speedchart td { + font-size:small; +} + +pre.codesample { + margin: 1.5em; + padding: .5em; + font-size: .95em; + line-height:1em; + background-color: #eee; + border: 1px solid #ccc; + width:450px; + overflow:auto; +} + +#speedchart { + margin:5px 10px 5px 10px; +} diff --git a/lib3/Mako-0.7.3/doc/build/syntax.rst b/lib3/Mako-0.7.3/doc/build/syntax.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/syntax.rst @@ -0,0 +1,469 @@ +.. _syntax_toplevel: + +====== +Syntax +====== + +A Mako template is parsed from a text stream containing any kind +of content, XML, HTML, email text, etc. The template can further +contain Mako-specific directives which represent variable and/or +expression substitutions, control structures (i.e. conditionals +and loops), server-side comments, full blocks of Python code, as +well as various tags that offer additional functionality. All of +these constructs compile into real Python code. This means that +you can leverage the full power of Python in almost every aspect +of a Mako template. + +Expression Substitution +======================= + +The simplest expression is just a variable substitution. The +syntax for this is the ``${}`` construct, which is inspired by +Perl, Genshi, JSP EL, and others: + +.. sourcecode:: mako + + this is x: ${x} + +Above, the string representation of ``x`` is applied to the +template's output stream. If you're wondering where ``x`` comes +from, it's usually from the :class:`.Context` supplied to the +template's rendering function. If ``x`` was not supplied to the +template and was not otherwise assigned locally, it evaluates to +a special value ``UNDEFINED``. More on that later. + +The contents within the ``${}`` tag are evaluated by Python +directly, so full expressions are OK: + +.. sourcecode:: mako + + pythagorean theorem: ${pow(x,2) + pow(y,2)} + +The results of the expression are evaluated into a string result +in all cases before being rendered to the output stream, such as +the above example where the expression produces a numeric +result. 
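+To see the substitution from the Python side, a minimal sketch (assuming
+only that Mako is importable; the values for ``x`` and ``y`` are invented):
+
+.. sourcecode:: python
+
+    from mako.template import Template
+
+    t = Template("this is x: ${x}, and the expression gives: ${pow(x, 2) + pow(y, 2)}")
+    print(t.render(x=3, y=4))
+    # this is x: 3, and the expression gives: 25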
+ +Expression Escaping +=================== + +Mako includes a number of built-in escaping mechanisms, +including HTML, URI and XML escaping, as well as a "trim" +function. These escapes can be added to an expression +substitution using the ``|`` operator: + +.. sourcecode:: mako + + ${"this is some text" | u} + +The above expression applies URL escaping to the expression, and +produces ``this+is+some+text``. The ``u`` name indicates URL +escaping, whereas ``h`` represents HTML escaping, ``x`` +represents XML escaping, and ``trim`` applies a trim function. + +Read more about built-in filtering functions, including how to +make your own filter functions, in :ref:`filtering_toplevel`. + +Control Structures +================== + +A control structure refers to all those things that control the +flow of a program -- conditionals (i.e. ``if``/``else``), loops (like +``while`` and ``for``), as well as things like ``try``/``except``. In Mako, +control structures are written using the ``%`` marker followed +by a regular Python control expression, and are "closed" by +using another ``%`` marker with the tag "``end``", where +"````" is the keyword of the expression: + +.. sourcecode:: mako + + % if x==5: + this is some output + % endif + +The ``%`` can appear anywhere on the line as long as no text +precedes it; indentation is not significant. The full range of +Python "colon" expressions are allowed here, including +``if``/``elif``/``else``, ``while``, ``for``, and even ``def``, although +Mako has a built-in tag for defs which is more full-featured. + +.. sourcecode:: mako + + % for a in ['one', 'two', 'three', 'four', 'five']: + % if a[0] == 't': + its two or three + % elif a[0] == 'f': + four/five + % else: + one + % endif + % endfor + +The ``%`` sign can also be "escaped", if you actually want to +emit a percent sign as the first non whitespace character on a +line, by escaping it as in ``%%``: + +.. sourcecode:: mako + + %% some text + + %% some more text + +The Loop Context +---------------- + +The **loop context** provides additional information about a loop +while inside of a ``% for`` structure: + +.. sourcecode:: mako + +
+    <ul>
+    % for a in ("one", "two", "three"):
+        <li>Item ${loop.index}: ${a}</li>
+    % endfor
+    </ul>
      + +See :ref:`loop_context` for more information on this feature. + +.. versionadded:: 0.7 + +Comments +======== + +Comments come in two varieties. The single line comment uses +``##`` as the first non-space characters on a line: + +.. sourcecode:: mako + + ## this is a comment. + ...text ... + +A multiline version exists using ``<%doc> ...text... ``: + +.. sourcecode:: mako + + <%doc> + these are comments + more comments + + +Newline Filters +=============== + +The backslash ("``\``") character, placed at the end of any +line, will consume the newline character before continuing to +the next line: + +.. sourcecode:: mako + + here is a line that goes onto \ + another line. + +The above text evaluates to: + +.. sourcecode:: text + + here is a line that goes onto another line. + +Python Blocks +============= + +Any arbitrary block of python can be dropped in using the ``<% +%>`` tags: + +.. sourcecode:: mako + + this is a template + <% + x = db.get_resource('foo') + y = [z.element for z in x if x.frobnizzle==5] + %> + % for elem in y: + element: ${elem} + % endfor + +Within ``<% %>``, you're writing a regular block of Python code. +While the code can appear with an arbitrary level of preceding +whitespace, it has to be consistently formatted with itself. +Mako's compiler will adjust the block of Python to be consistent +with the surrounding generated Python code. + +Module-level Blocks +=================== + +A variant on ``<% %>`` is the module-level code block, denoted +by ``<%! %>``. Code within these tags is executed at the module +level of the template, and not within the rendering function of +the template. Therefore, this code does not have access to the +template's context and is only executed when the template is +loaded into memory (which can be only once per application, or +more, depending on the runtime environment). Use the ``<%! %>`` +tags to declare your template's imports, as well as any +pure-Python functions you might want to declare: + +.. sourcecode:: mako + + <%! + import mylib + import re + + def filter(text): + return re.sub(r'^@', '', text) + %> + +Any number of ``<%! %>`` blocks can be declared anywhere in a +template; they will be rendered in the resulting module +in a single contiguous block above all render callables, +in the order in which they appear in the source template. + +Tags +==== + +The rest of what Mako offers takes place in the form of tags. +All tags use the same syntax, which is similar to an XML tag +except that the first character of the tag name is a ``%`` +character. The tag is closed either by a contained slash +character, or an explicit closing tag: + +.. sourcecode:: mako + + <%include file="foo.txt"/> + + <%def name="foo" buffered="True"> + this is a def + + +All tags have a set of attributes which are defined for each +tag. Some of these attributes are required. Also, many +attributes support **evaluation**, meaning you can embed an +expression (using ``${}``) inside the attribute text: + +.. sourcecode:: mako + + <%include file="/foo/bar/${myfile}.txt"/> + +Whether or not an attribute accepts runtime evaluation depends +on the type of tag and how that tag is compiled into the +template. The best way to find out if you can stick an +expression in is to try it! The lexer will tell you if it's not +valid. + +Heres a quick summary of all the tags: + +``<%page>`` +----------- + +This tag defines general characteristics of the template, +including caching arguments, and optional lists of arguments +which the template expects when invoked. 
+ +.. sourcecode:: mako + + <%page args="x, y, z='default'"/> + +Or a page tag that defines caching characteristics: + +.. sourcecode:: mako + + <%page cached="True" cache_type="memory"/> + +Currently, only one ``<%page>`` tag gets used per template, the +rest get ignored. While this will be improved in a future +release, for now make sure you have only one ``<%page>`` tag +defined in your template, else you may not get the results you +want. The details of what ``<%page>`` is used for are described +further in :ref:`namespaces_body` as well as :ref:`caching_toplevel`. + +``<%include>`` +-------------- + +A tag that is familiar from other template languages, ``%include`` +is a regular joe that just accepts a file argument and calls in +the rendered result of that file: + +.. sourcecode:: mako + + <%include file="header.html"/> + + hello world + + <%include file="footer.html"/> + +Include also accepts arguments which are available as ``<%page>`` arguments in the receiving template: + +.. sourcecode:: mako + + <%include file="toolbar.html" args="current_section='members', username='ed'"/> + +``<%def>`` +---------- + +The ``%def`` tag defines a Python function which contains a set +of content, that can be called at some other point in the +template. The basic idea is simple: + +.. sourcecode:: mako + + <%def name="myfunc(x)"> + this is myfunc, x is ${x} + + + ${myfunc(7)} + +The ``%def`` tag is a lot more powerful than a plain Python ``def``, as +the Mako compiler provides many extra services with ``%def`` that +you wouldn't normally have, such as the ability to export defs +as template "methods", automatic propagation of the current +:class:`.Context`, buffering/filtering/caching flags, and def calls +with content, which enable packages of defs to be sent as +arguments to other def calls (not as hard as it sounds). Get the +full deal on what ``%def`` can do in :ref:`defs_toplevel`. + +``<%block>`` +------------ + +``%block`` is a tag that is close to a ``%def``, +except executes itself immediately in its base-most scope, +and can also be anonymous (i.e. with no name): + +.. sourcecode:: mako + + <%block filter="h"> + some stuff. + + +Inspired by Jinja2 blocks, named blocks offer a syntactically pleasing way +to do inheritance: + +.. sourcecode:: mako + + + + <%block name="header"> +

+            <h2><%block name="title"/></h2>
+        </%block>
      + + ${self.body()} + + + +Blocks are introduced in :ref:`blocks` and further described in :ref:`inheritance_toplevel`. + +.. versionadded:: 0.4.1 + +``<%namespace>`` +---------------- + +``%namespace`` is Mako's equivalent of Python's ``import`` +statement. It allows access to all the rendering functions and +metadata of other template files, plain Python modules, as well +as locally defined "packages" of functions. + +.. sourcecode:: mako + + <%namespace file="functions.html" import="*"/> + +The underlying object generated by ``%namespace``, an instance of +:class:`.mako.runtime.Namespace`, is a central construct used in +templates to reference template-specific information such as the +current URI, inheritance structures, and other things that are +not as hard as they sound right here. Namespaces are described +in :ref:`namespaces_toplevel`. + +``<%inherit>`` +-------------- + +Inherit allows templates to arrange themselves in **inheritance +chains**. This is a concept familiar in many other template +languages. + +.. sourcecode:: mako + + <%inherit file="base.html"/> + +When using the ``%inherit`` tag, control is passed to the topmost +inherited template first, which then decides how to handle +calling areas of content from its inheriting templates. Mako +offers a lot of flexibility in this area, including dynamic +inheritance, content wrapping, and polymorphic method calls. +Check it out in :ref:`inheritance_toplevel`. + +``<%``\ nsname\ ``:``\ defname\ ``>`` +------------------------------------- + +Any user-defined "tag" can be created against +a namespace by using a tag with a name of the form +``<%:>``. The closed and open formats of such a +tag are equivalent to an inline expression and the ``<%call>`` +tag, respectively. + +.. sourcecode:: mako + + <%mynamespace:somedef param="some value"> + this is the body + + +To create custom tags which accept a body, see +:ref:`defs_with_content`. + +.. versionadded:: 0.2.3 + +``<%call>`` +----------- + +The call tag is the "classic" form of a user-defined tag, and is +roughly equivalent to the ``<%namespacename:defname>`` syntax +described above. This tag is also described in :ref:`defs_with_content`. + +``<%doc>`` +---------- + +The ``%doc`` tag handles multiline comments: + +.. sourcecode:: mako + + <%doc> + these are comments + more comments + + +Also the ``##`` symbol as the first non-space characters on a line can be used for single line comments. + +``<%text>`` +----------- + +This tag suspends the Mako lexer's normal parsing of Mako +template directives, and returns its entire body contents as +plain text. It is used pretty much to write documentation about +Mako: + +.. sourcecode:: mako + + <%text filter="h"> + heres some fake mako ${syntax} + <%def name="x()">${x} + + +Returning Early from a Template +=============================== + +Sometimes you want to stop processing a template or ``<%def>`` +method in the middle and just use the text you've accumulated so +far. You can use a ``return`` statement inside a Python +block to do that. + +.. sourcecode:: mako + + % if not len(records): + No records found. + <% return %> + % endif + +Or perhaps: + +.. 
sourcecode:: mako + + <% + if not len(records): + return + %> + diff --git a/lib3/Mako-0.7.3/doc/build/templates/base.mako b/lib3/Mako-0.7.3/doc/build/templates/base.mako new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/templates/base.mako @@ -0,0 +1,60 @@ + + + + +<%block name="head_title">Mako Templates for Python</%block> +<%block name="headers"> + + + + + + +
      + + + + +
      + + ${next.body()} +
      +<%block name="footer"> +
      + + + +
      +
      + + diff --git a/lib3/Mako-0.7.3/doc/build/templates/genindex.mako b/lib3/Mako-0.7.3/doc/build/templates/genindex.mako new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/templates/genindex.mako @@ -0,0 +1,77 @@ +<%inherit file="${context['layout']}"/> + +<%block name="show_title" filter="util.striptags"> + ${_('Index')} + + +

      ${_('Index')}

      + + % for i, (key, dummy) in enumerate(genindexentries): + ${i != 0 and '| ' or ''}${key} + % endfor + +
      + + % for i, (key, entries) in enumerate(genindexentries): +

      ${key}

      +
      +
      + <% + breakat = genindexcounts[i] // 2 + numcols = 1 + numitems = 0 + %> +% for entryname, (links, subitems) in entries: + +
      + % if links: + ${entryname|h} + % for unknown, link in links[1:]: + , [${i}] + % endfor + % else: + ${entryname|h} + % endif +
      + + % if subitems: +
      + % for subentryname, subentrylinks in subitems: +
      ${subentryname|h} + % for j, (unknown, link) in enumerate(subentrylinks[1:]): + [${j}] + % endfor +
      + % endfor +
      + % endif + + <% + numitems = numitems + 1 + len(subitems) + %> + % if numcols <2 and numitems > breakat: + <% + numcols = numcols + 1 + %> +
      + % endif + +% endfor +
      +
      +% endfor + +<%def name="sidebarrel()"> +% if split_index: +

      ${_('Index')}

      +

      + % for i, (key, dummy) in enumerate(genindexentries): + ${i > 0 and '| ' or ''} + ${key} + % endfor +

      + +

      ${_('Full index on one page')}

      +% endif + ${parent.sidebarrel()} + diff --git a/lib3/Mako-0.7.3/doc/build/templates/layout.mako b/lib3/Mako-0.7.3/doc/build/templates/layout.mako new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/templates/layout.mako @@ -0,0 +1,199 @@ +## coding: utf-8 +<%! + local_script_files = [] +%> +<%doc> + Structural elements are all prefixed with "docs-" + to prevent conflicts when the structure is integrated into the + main site. + + docs-container -> + docs-header -> + docs-search + docs-version-header + docs-top-navigation + docs-top-page-control + docs-navigation-banner + docs-body-container -> + docs-sidebar + docs-body + docs-bottom-navigation + docs-copyright + + +<%inherit file="base.mako"/> + +<% +withsidebar = bool(toc) and current_page_name != 'index' +%> + +<%block name="head_title"> + % if current_page_name != 'index': + ${capture(self.show_title) | util.striptags} — + % endif + ${docstitle|h} + + + +
      + +<%block name="headers"> + + + + + % for scriptfile in script_files + self.attr.local_script_files: + + % endfor + % if hasdoc('about'): + + % endif + + + % if hasdoc('copyright'): + + % endif + + % if parents: + + % endif + % if nexttopic: + + % endif + % if prevtopic: + + % endif + + +
      +

      ${docstitle|h}

      + + + +
      + Release: ${release} + +
      + +
      + +
      + + +
      + ${docstitle|h} + % if parents: + % for parent in parents: + ?? ${parent['title']} + % endfor + % endif + % if current_page_name != 'index': + ?? ${self.show_title()} + % endif + +

      + <%block name="show_title"> + ${title} + +

      +
      + +
      + +
      + +% if withsidebar: +
      +

      Table of Contents

      + ${toc} + + % if prevtopic: +

      Previous Topic

      +

      + ${prevtopic['title']} +

      + % endif + % if nexttopic: +

      Next Topic

      +

      + ${nexttopic['title']} +

      + % endif + +

      Quick Search

      +

      +

      +

      + +
      +% endif + +
      + ${next.body()} +
      + +
      + + + +
      diff --git a/lib3/Mako-0.7.3/doc/build/templates/page.mako b/lib3/Mako-0.7.3/doc/build/templates/page.mako new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/templates/page.mako @@ -0,0 +1,2 @@ +<%inherit file="${context['layout']}"/> +${body| util.strip_toplevel_anchors} \ No newline at end of file diff --git a/lib3/Mako-0.7.3/doc/build/templates/rtd_layout.mako b/lib3/Mako-0.7.3/doc/build/templates/rtd_layout.mako new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/templates/rtd_layout.mako @@ -0,0 +1,174 @@ + + +<%inherit file="/layout.mako"/> + +<% + newscript = [] + # strip out script files that RTD wants to provide + for script in script_files: + for token in ("jquery.js", "underscore.js", "doctools.js"): + if token in script: + break + else: + newscript.append(script) + script_files[:] = newscript +%> + +<%block name="headers"> + ${parent.headers()} + + + + + +##{% if using_theme %} +## +##{% endif %} + + + + + +${next.body()} + +<%block name="footer"> + ${parent.footer()} + +## Keep this here, so that the RTD logo doesn't stomp on the bottom of the theme. +
      +
      +
      + + + + + + +% if analytics_code: + + +% endif + + diff --git a/lib3/Mako-0.7.3/doc/build/templates/search.mako b/lib3/Mako-0.7.3/doc/build/templates/search.mako new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/templates/search.mako @@ -0,0 +1,25 @@ +<%inherit file="${context['layout']}"/> + +<%! + local_script_files = ['_static/searchtools.js'] +%> + +<%block name="show_title" filter="util.striptags"> + ${_('Search')} + + +
      +

      Enter Search Terms:

      + +
      + +
      + +<%block name="footer"> + ${parent.footer()} + + diff --git a/lib3/Mako-0.7.3/doc/build/unicode.rst b/lib3/Mako-0.7.3/doc/build/unicode.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/unicode.rst @@ -0,0 +1,345 @@ +.. _unicode_toplevel: + +=================== +The Unicode Chapter +=================== + +The Python language supports two ways of representing what we +know as "strings", i.e. series of characters. In Python 2, the +two types are ``string`` and ``unicode``, and in Python 3 they are +``bytes`` and ``string``. A key aspect of the Python 2 ``string`` and +Python 3 ``bytes`` types are that they contain no information +regarding what **encoding** the data is stored in. For this +reason they were commonly referred to as **byte strings** on +Python 2, and Python 3 makes this name more explicit. The +origins of this come from Python's background of being developed +before the Unicode standard was even available, back when +strings were C-style strings and were just that, a series of +bytes. Strings that had only values below 128 just happened to +be **ASCII** strings and were printable on the console, whereas +strings with values above 128 would produce all kinds of +graphical characters and bells. + +Contrast the "byte-string" type with the "unicode/string" type. +Objects of this latter type are created whenever you say something like +``u"hello world"`` (or in Python 3, just ``"hello world"``). In this +case, Python represents each character in the string internally +using multiple bytes per character (something similar to +UTF-16). What's important is that when using the +``unicode``/``string`` type to store strings, Python knows the +data's encoding; it's in its own internal format. Whereas when +using the ``string``/``bytes`` type, it does not. + +When Python 2 attempts to treat a byte-string as a string, which +means it's attempting to compare/parse its characters, to coerce +it into another encoding, or to decode it to a unicode object, +it has to guess what the encoding is. In this case, it will +pretty much always guess the encoding as ``ascii``... and if the +byte-string contains bytes above value 128, you'll get an error. +Python 3 eliminates much of this confusion by just raising an +error unconditionally if a byte-string is used in a +character-aware context. + +There is one operation that Python *can* do with a non-ASCII +byte-string, and it's a great source of confusion: it can dump the +byte-string straight out to a stream or a file, with nary a care +what the encoding is. To Python, this is pretty much like +dumping any other kind of binary data (like an image) to a +stream somewhere. In Python 2, it is common to see programs that +embed all kinds of international characters and encodings into +plain byte-strings (i.e. using ``"hello world"`` style literals) +can fly right through their run, sending reams of strings out to +wherever they are going, and the programmer, seeing the same +output as was expressed in the input, is now under the illusion +that his or her program is Unicode-compliant. In fact, their +program has no unicode awareness whatsoever, and similarly has +no ability to interact with libraries that *are* unicode aware. +Python 3 makes this much less likely by defaulting to unicode as +the storage format for strings. + +The "pass through encoded data" scheme is what template +languages like Cheetah and earlier versions of Myghty do by +default. 
Mako as of version 0.2 also supports this mode of +operation when using Python 2, using the ``disable_unicode=True`` +flag. However, when using Mako in its default mode of +unicode-aware, it requires explicitness when dealing with +non-ASCII encodings. Additionally, if you ever need to handle +unicode strings and other kinds of encoding conversions more +intelligently, the usage of raw byte-strings quickly becomes a +nightmare, since you are sending the Python interpreter +collections of bytes for which it can make no intelligent +decisions with regards to encoding. In Python 3 Mako only allows +usage of native, unicode strings. + +In normal Mako operation, all parsed template constructs and +output streams are handled internally as Python ``unicode`` +objects. It's only at the point of :meth:`~.Template.render` that this unicode +stream may be rendered into whatever the desired output encoding +is. The implication here is that the template developer must +:ensure that :ref:`the encoding of all non-ASCII templates is explicit +` (still required in Python 3), +that :ref:`all non-ASCII-encoded expressions are in one way or another +converted to unicode ` +(not much of a burden in Python 3), and that :ref:`the output stream of the +template is handled as a unicode stream being encoded to some +encoding ` (still required in Python 3). + +.. _set_template_file_encoding: + +Specifying the Encoding of a Template File +========================================== + +This is the most basic encoding-related setting, and it is +equivalent to Python's "magic encoding comment", as described in +`pep-0263 `_. Any +template that contains non-ASCII characters requires that this +comment be present so that Mako can decode to unicode (and also +make usage of Python's AST parsing services). Mako's lexer will +use this encoding in order to convert the template source into a +``unicode`` object before continuing its parsing: + +.. sourcecode:: mako + + ## -*- coding: utf-8 -*- + + Alors vous imaginez ma surprise, au lever du jour, quand + une dr??le de petite voix m???a r??veill??. Elle disait: + ?? S???il vous pla??t??? dessine-moi un mouton! ?? + +For the picky, the regular expression used is derived from that +of the above mentioned pep: + +.. sourcecode:: python + + #.*coding[:=]\s*([-\w.]+).*\n + +The lexer will convert to unicode in all cases, so that if any +characters exist in the template that are outside of the +specified encoding (or the default of ``ascii``), the error will +be immediate. + +As an alternative, the template encoding can be specified +programmatically to either :class:`.Template` or :class:`.TemplateLookup` via +the ``input_encoding`` parameter: + +.. sourcecode:: python + + t = TemplateLookup(directories=['./'], input_encoding='utf-8') + +The above will assume all located templates specify ``utf-8`` +encoding, unless the template itself contains its own magic +encoding comment, which takes precedence. + +.. _handling_non_ascii_expressions: + +Handling Expressions +==================== + +The next area that encoding comes into play is in expression +constructs. By default, Mako's treatment of an expression like +this: + +.. sourcecode:: mako + + ${"hello world"} + +looks something like this: + +.. sourcecode:: python + + context.write(unicode("hello world")) + +In Python 3, it's just: + +.. sourcecode:: python + + context.write(str("hello world")) + +That is, **the output of all expressions is run through the +``unicode`` built-in**. 
This is the default setting, and can be +modified to expect various encodings. The ``unicode`` step serves +both the purpose of rendering non-string expressions into +strings (such as integers or objects which contain ``__str()__`` +methods), and to ensure that the final output stream is +constructed as a unicode object. The main implication of this is +that **any raw byte-strings that contain an encoding other than +ASCII must first be decoded to a Python unicode object**. It +means you can't say this in Python 2: + +.. sourcecode:: mako + + ${"voix m???a r??veill??."} ## error in Python 2! + +You must instead say this: + +.. sourcecode:: mako + + ${u"voix m???a r??veill??."} ## OK ! + +Similarly, if you are reading data from a file that is streaming +bytes, or returning data from some object that is returning a +Python byte-string containing a non-ASCII encoding, you have to +explicitly decode to unicode first, such as: + +.. sourcecode:: mako + + ${call_my_object().decode('utf-8')} + +Note that filehandles acquired by ``open()`` in Python 3 default +to returning "text", that is the decoding is done for you. See +Python 3's documentation for the ``open()`` built-in for details on +this. + +If you want a certain encoding applied to *all* expressions, +override the ``unicode`` builtin with the ``decode`` built-in at the +:class:`.Template` or :class:`.TemplateLookup` level: + +.. sourcecode:: python + + t = Template(templatetext, default_filters=['decode.utf8']) + +Note that the built-in ``decode`` object is slower than the +``unicode`` function, since unlike ``unicode`` it's not a Python +built-in, and it also checks the type of the incoming data to +determine if string conversion is needed first. + +The ``default_filters`` argument can be used to entirely customize +the filtering process of expressions. This argument is described +in :ref:`filtering_default_filters`. + +.. _defining_output_encoding: + +Defining Output Encoding +======================== + +Now that we have a template which produces a pure unicode output +stream, all the hard work is done. We can take the output and do +anything with it. + +As stated in the :doc:`"Usage" chapter `, both :class:`.Template` and +:class:`.TemplateLookup` accept ``output_encoding`` and ``encoding_errors`` +parameters which can be used to encode the output in any Python +supported codec: + +.. sourcecode:: python + + from mako.template import Template + from mako.lookup import TemplateLookup + + mylookup = TemplateLookup(directories=['/docs'], output_encoding='utf-8', encoding_errors='replace') + + mytemplate = mylookup.get_template("foo.txt") + print mytemplate.render() + +:meth:`~.Template.render` will return a ``bytes`` object in Python 3 if an output +encoding is specified. By default it performs no encoding and +returns a native string. + +:meth:`~.Template.render_unicode` will return the template output as a Python +``unicode`` object (or ``string`` in Python 3): + +.. sourcecode:: python + + print mytemplate.render_unicode() + +The above method disgards the output encoding keyword argument; +you can encode yourself by saying: + +.. sourcecode:: python + + print mytemplate.render_unicode().encode('utf-8', 'replace') + +Buffer Selection +---------------- + +Mako does play some games with the style of buffering used +internally, to maximize performance. Since the buffer is by far +the most heavily used object in a render operation, it's +important! 
+ +When calling :meth:`~.Template.render` on a template that does not specify any +output encoding (i.e. it's ``ascii``), Python's ``cStringIO`` module, +which cannot handle encoding of non-ASCII ``unicode`` objects +(even though it can send raw byte-strings through), is used for +buffering. Otherwise, a custom Mako class called +``FastEncodingBuffer`` is used, which essentially is a super +dumbed-down version of ``StringIO`` that gathers all strings into +a list and uses ``u''.join(elements)`` to produce the final output +-- it's markedly faster than ``StringIO``. + +.. _unicode_disabled: + +Saying to Heck with It: Disabling the Usage of Unicode Entirely +=============================================================== + +Some segments of Mako's userbase choose to make no usage of +Unicode whatsoever, and instead would prefer the "pass through" +approach; all string expressions in their templates return +encoded byte-strings, and they would like these strings to pass +right through. The only advantage to this approach is that +templates need not use ``u""`` for literal strings; there's an +arguable speed improvement as well since raw byte-strings +generally perform slightly faster than unicode objects in +Python. For these users, assuming they're sticking with Python +2, they can hit the ``disable_unicode=True`` flag as so: + +.. sourcecode:: python + + # -*- encoding:utf-8 -*- + from mako.template import Template + + t = Template("dr??le de petite voix m???a r??veill??.", disable_unicode=True, input_encoding='utf-8') + print t.code + +The ``disable_unicode`` mode is strictly a Python 2 thing. It is +not supported at all in Python 3. + +The generated module source code will contain elements like +these: + +.. sourcecode:: python + + # -*- encoding:utf-8 -*- + # ...more generated code ... + + def render_body(context,**pageargs): + context.caller_stack.push_frame() + try: + __M_locals = dict(pageargs=pageargs) + # SOURCE LINE 1 + context.write('dr\xc3\xb4le de petite voix m\xe2\x80\x99a r\xc3\xa9veill\xc3\xa9.') + return '' + finally: + context.caller_stack.pop_frame() + +Where above that the string literal used within :meth:`.Context.write` +is a regular byte-string. + +When ``disable_unicode=True`` is turned on, the ``default_filters`` +argument which normally defaults to ``["unicode"]`` now defaults +to ``["str"]`` instead. Setting ``default_filters`` to the empty list +``[]`` can remove the overhead of the ``str`` call. Also, in this +mode you **cannot** safely call :meth:`~.Template.render_unicode` -- you'll get +unicode/decode errors. + +The ``h`` filter (HTML escape) uses a less performant pure Python +escape function in non-unicode mode. This because +MarkupSafe only supports Python unicode objects for non-ASCII +strings. + +.. versionchanged:: 0.3.4 + In prior versions, it used ``cgi.escape()``, which has been replaced + with a function that also escapes single quotes. + +Rules for using ``disable_unicode=True`` +---------------------------------------- + +* Don't use this mode unless you really, really want to and you + absolutely understand what you're doing. +* Don't use this option just because you don't want to learn to + use Unicode properly; we aren't supporting user issues in this + mode of operation. We will however offer generous help for the + vast majority of users who stick to the Unicode program. +* Python 3 is unicode by default, and the flag is not available + when running on Python 3. 
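Wrapping up the chapter, here is a small sketch (not part of the changeset) of the Python 3 behavior described under "Defining Output Encoding" above; the template text and variable name are invented:

.. sourcecode:: python

    from mako.template import Template

    t = Template("hello, ${name}", output_encoding='utf-8')

    encoded = t.render(name="café")        # bytes, encoded as UTF-8
    text = t.render_unicode(name="café")   # str; the output encoding is ignored

    assert encoded == text.encode('utf-8')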
+ diff --git a/lib3/Mako-0.7.3/doc/build/usage.rst b/lib3/Mako-0.7.3/doc/build/usage.rst new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/build/usage.rst @@ -0,0 +1,520 @@ +.. _usage_toplevel: + +===== +Usage +===== + +Basic Usage +=========== + +This section describes the Python API for Mako templates. If you +are using Mako within a web framework such as Pylons, the work +of integrating Mako's API is already done for you, in which case +you can skip to the next section, :ref:`syntax_toplevel`. + +The most basic way to create a template and render it is through +the :class:`.Template` class: + +.. sourcecode:: python + + from mako.template import Template + + mytemplate = Template("hello world!") + print mytemplate.render() + +Above, the text argument to :class:`.Template` is **compiled** into a +Python module representation. This module contains a function +called ``render_body()``, which produces the output of the +template. When ``mytemplate.render()`` is called, Mako sets up a +runtime environment for the template and calls the +``render_body()`` function, capturing the output into a buffer and +returning its string contents. + + +The code inside the ``render_body()`` function has access to a +namespace of variables. You can specify these variables by +sending them as additional keyword arguments to the :meth:`~.Template.render` +method: + +.. sourcecode:: python + + from mako.template import Template + + mytemplate = Template("hello, ${name}!") + print mytemplate.render(name="jack") + +The :meth:`~.Template.render` method calls upon Mako to create a +:class:`.Context` object, which stores all the variable names accessible +to the template and also stores a buffer used to capture output. +You can create this :class:`.Context` yourself and have the template +render with it, using the :meth:`~.Template.render_context` method: + +.. sourcecode:: python + + from mako.template import Template + from mako.runtime import Context + from StringIO import StringIO + + mytemplate = Template("hello, ${name}!") + buf = StringIO() + ctx = Context(buf, name="jack") + mytemplate.render_context(ctx) + print buf.getvalue() + +Using File-Based Templates +========================== + +A :class:`.Template` can also load its template source code from a file, +using the ``filename`` keyword argument: + +.. sourcecode:: python + + from mako.template import Template + + mytemplate = Template(filename='/docs/mytmpl.txt') + print mytemplate.render() + +For improved performance, a :class:`.Template` which is loaded from a +file can also cache the source code to its generated module on +the filesystem as a regular Python module file (i.e. a ``.py`` +file). To do this, just add the ``module_directory`` argument to +the template: + +.. sourcecode:: python + + from mako.template import Template + + mytemplate = Template(filename='/docs/mytmpl.txt', module_directory='/tmp/mako_modules') + print mytemplate.render() + +When the above code is rendered, a file +``/tmp/mako_modules/docs/mytmpl.txt.py`` is created containing the +source code for the module. The next time a :class:`.Template` with the +same arguments is created, this module file will be +automatically re-used. + +.. _usage_templatelookup: + +Using ``TemplateLookup`` +======================== + +All of the examples thus far have dealt with the usage of a +single :class:`.Template` object. If the code within those templates +tries to locate another template resource, it will need some way +to find them, using simple URI strings. 
For this need, the +resolution of other templates from within a template is +accomplished by the :class:`.TemplateLookup` class. This class is +constructed given a list of directories in which to search for +templates, as well as keyword arguments that will be passed to +the :class:`.Template` objects it creates: + +.. sourcecode:: python + + from mako.template import Template + from mako.lookup import TemplateLookup + + mylookup = TemplateLookup(directories=['/docs']) + mytemplate = Template("""<%include file="header.txt"/> hello world!""", lookup=mylookup) + +Above, we created a textual template which includes the file +``"header.txt"``. In order for it to have somewhere to look for +``"header.txt"``, we passed a :class:`.TemplateLookup` object to it, which +will search in the directory ``/docs`` for the file ``"header.txt"``. + +Usually, an application will store most or all of its templates +as text files on the filesystem. So far, all of our examples +have been a little bit contrived in order to illustrate the +basic concepts. But a real application would get most or all of +its templates directly from the :class:`.TemplateLookup`, using the +aptly named :meth:`~.TemplateLookup.get_template` method, which accepts the URI of the +desired template: + +.. sourcecode:: python + + from mako.template import Template + from mako.lookup import TemplateLookup + + mylookup = TemplateLookup(directories=['/docs'], module_directory='/tmp/mako_modules') + + def serve_template(templatename, **kwargs): + mytemplate = mylookup.get_template(templatename) + print mytemplate.render(**kwargs) + +In the example above, we create a :class:`.TemplateLookup` which will +look for templates in the ``/docs`` directory, and will store +generated module files in the ``/tmp/mako_modules`` directory. The +lookup locates templates by appending the given URI to each of +its search directories; so if you gave it a URI of +``/etc/beans/info.txt``, it would search for the file +``/docs/etc/beans/info.txt``, else raise a :class:`.TopLevelNotFound` +exception, which is a custom Mako exception. + +When the lookup locates templates, it will also assign a ``uri`` +property to the :class:`.Template` which is the URI passed to the +:meth:`~.TemplateLookup.get_template()` call. :class:`.Template` uses this URI to calculate the +name of its module file. So in the above example, a +``templatename`` argument of ``/etc/beans/info.txt`` will create a +module file ``/tmp/mako_modules/etc/beans/info.txt.py``. + +Setting the Collection Size +--------------------------- + +The :class:`.TemplateLookup` also serves the important need of caching a +fixed set of templates in memory at a given time, so that +successive URI lookups do not result in full template +compilations and/or module reloads on each request. By default, +the :class:`.TemplateLookup` size is unbounded. You can specify a fixed +size using the ``collection_size`` argument: + +.. sourcecode:: python + + mylookup = TemplateLookup(directories=['/docs'], + module_directory='/tmp/mako_modules', collection_size=500) + +The above lookup will continue to load templates into memory +until it reaches a count of around 500. At that point, it will +clean out a certain percentage of templates using a least +recently used scheme. + +Setting Filesystem Checks +------------------------- + +Another important flag on :class:`.TemplateLookup` is +``filesystem_checks``. 
This defaults to ``True``, and says that each +time a template is returned by the :meth:`~.TemplateLookup.get_template()` method, the +revision time of the original template file is checked against +the last time the template was loaded, and if the file is newer +will reload its contents and recompile the template. On a +production system, setting ``filesystem_checks`` to ``False`` can +afford a small to moderate performance increase (depending on +the type of filesystem used). + +.. _usage_unicode: + +Using Unicode and Encoding +========================== + +Both :class:`.Template` and :class:`.TemplateLookup` accept ``output_encoding`` +and ``encoding_errors`` parameters which can be used to encode the +output in any Python supported codec: + +.. sourcecode:: python + + from mako.template import Template + from mako.lookup import TemplateLookup + + mylookup = TemplateLookup(directories=['/docs'], output_encoding='utf-8', encoding_errors='replace') + + mytemplate = mylookup.get_template("foo.txt") + print mytemplate.render() + +When using Python 3, the :meth:`~.Template.render` method will return a ``bytes`` +object, **if** ``output_encoding`` is set. Otherwise it returns a +``string``. + +Additionally, the :meth:`~.Template.render_unicode()` method exists which will +return the template output as a Python ``unicode`` object, or in +Python 3 a ``string``: + +.. sourcecode:: python + + print mytemplate.render_unicode() + +The above method disregards the output encoding keyword +argument; you can encode yourself by saying: + +.. sourcecode:: python + + print mytemplate.render_unicode().encode('utf-8', 'replace') + +Note that Mako's ability to return data in any encoding and/or +``unicode`` implies that the underlying output stream of the +template is a Python unicode object. This behavior is described +fully in :ref:`unicode_toplevel`. + +.. _handling_exceptions: + +Handling Exceptions +=================== + +Template exceptions can occur in two distinct places. One is +when you **lookup, parse and compile** the template, the other +is when you **run** the template. Within the running of a +template, exceptions are thrown normally from whatever Python +code originated the issue. Mako has its own set of exception +classes which mostly apply to the lookup and lexer/compiler +stages of template construction. Mako provides some library +routines that can be used to help provide Mako-specific +information about any exception's stack trace, as well as +formatting the exception within textual or HTML format. In all +cases, the main value of these handlers is that of converting +Python filenames, line numbers, and code samples into Mako +template filenames, line numbers, and code samples. All lines +within a stack trace which correspond to a Mako template module +will be converted to be against the originating template file. + +To format exception traces, the :func:`.text_error_template` and +:func:`.html_error_template` functions are provided. They make usage of +``sys.exc_info()`` to get at the most recently thrown exception. +Usage of these handlers usually looks like: + +.. sourcecode:: python + + from mako import exceptions + + try: + template = lookup.get_template(uri) + print template.render() + except: + print exceptions.text_error_template().render() + +Or for the HTML render function: + +.. 
sourcecode:: python + + from mako import exceptions + + try: + template = lookup.get_template(uri) + print template.render() + except: + print exceptions.html_error_template().render() + +The :func:`.html_error_template` template accepts two options: +specifying ``full=False`` causes only a section of an HTML +document to be rendered. Specifying ``css=False`` will disable the +default stylesheet from being rendered. + +E.g.: + +.. sourcecode:: python + + print exceptions.html_error_template().render(full=False) + +The HTML render function is also available built-in to +:class:`.Template` using the ``format_exceptions`` flag. In this case, any +exceptions raised within the **render** stage of the template +will result in the output being substituted with the output of +:func:`.html_error_template`: + +.. sourcecode:: python + + template = Template(filename="/foo/bar", format_exceptions=True) + print template.render() + +Note that the compile stage of the above template occurs when +you construct the :class:`.Template` itself, and no output stream is +defined. Therefore exceptions which occur within the +lookup/parse/compile stage will not be handled and will +propagate normally. While the pre-render traceback usually will +not include any Mako-specific lines anyway, it will mean that +exceptions which occur previous to rendering and those which +occur within rendering will be handled differently... so the +``try``/``except`` patterns described previously are probably of more +general use. + +The underlying object used by the error template functions is +the :class:`.RichTraceback` object. This object can also be used +directly to provide custom error views. Here's an example usage +which describes its general API: + +.. sourcecode:: python + + from mako.exceptions import RichTraceback + + try: + template = lookup.get_template(uri) + print template.render() + except: + traceback = RichTraceback() + for (filename, lineno, function, line) in traceback.traceback: + print "File %s, line %s, in %s" % (filename, lineno, function) + print line, "\n" + print "%s: %s" % (str(traceback.error.__class__.__name__), traceback.error) + +Common Framework Integrations +============================= + +The Mako distribution includes a little bit of helper code for +the purpose of using Mako in some popular web framework +scenarios. This is a brief description of what's included. + +WSGI +---- + +A sample WSGI application is included in the distribution in the +file ``examples/wsgi/run_wsgi.py``. This runner is set up to pull +files from a `templates` as well as an `htdocs` directory and +includes a rudimental two-file layout. The WSGI runner acts as a +fully functional standalone web server, using ``wsgiutils`` to run +itself, and propagates GET and POST arguments from the request +into the :class:`.Context`, can serve images, CSS files and other kinds +of files, and also displays errors using Mako's included +exception-handling utilities. + +Pygments +-------- + +A `Pygments `_-compatible syntax +highlighting module is included under :mod:`mako.ext.pygmentplugin`. +This module is used in the generation of Mako documentation and +also contains various `setuptools` entry points under the heading +``pygments.lexers``, including ``mako``, ``html+mako``, ``xml+mako`` +(see the ``setup.py`` file for all the entry points). + +Babel +----- + +Mako provides support for extracting `gettext` messages from +templates via a `Babel`_ extractor +entry point under ``mako.ext.babelplugin``. 
+ +`Gettext` messages are extracted from all Python code sections, +including those of control lines and expressions embedded +in tags. + +`Translator +comments `_ +may also be extracted from Mako templates when a comment tag is +specified to `Babel`_ (such as with +the ``-c`` option). + +For example, a project ``"myproj"`` contains the following Mako +template at ``myproj/myproj/templates/name.html``: + +.. sourcecode:: mako + +
+    Name:
+    ## TRANSLATORS: This is a proper name. See the gettext
+    ## manual, section Names.
+    ${_('Francois Pinard')}
      + +To extract gettext messages from this template the project needs +a Mako section in its `Babel Extraction Method Mapping +file `_ +(typically located at ``myproj/babel.cfg``): + +.. sourcecode:: cfg + + # Extraction from Python source files + + [python: myproj/**.py] + + # Extraction from Mako templates + + [mako: myproj/templates/**.html] + input_encoding = utf-8 + +The Mako extractor supports an optional ``input_encoding`` +parameter specifying the encoding of the templates (identical to +:class:`.Template`/:class:`.TemplateLookup`'s ``input_encoding`` parameter). + +Invoking `Babel`_'s extractor at the +command line in the project's root directory: + +.. sourcecode:: sh + + myproj$ pybabel extract -F babel.cfg -c "TRANSLATORS:" . + +will output a `gettext` catalog to `stdout` including the following: + +.. sourcecode:: pot + + #. TRANSLATORS: This is a proper name. See the gettext + #. manual, section Names. + #: myproj/templates/name.html:5 + msgid "Francois Pinard" + msgstr "" + +This is only a basic example: +`Babel`_ can be invoked from ``setup.py`` +and its command line options specified in the accompanying +``setup.cfg`` via `Babel Distutils/Setuptools +Integration `_. + +Comments must immediately precede a `gettext` message to be +extracted. In the following case the ``TRANSLATORS:`` comment would +not have been extracted: + +.. sourcecode:: mako + +
+    ## TRANSLATORS: This is a proper name. See the gettext
+    ## manual, section Names.
+    Name: ${_('Francois Pinard')}
      + +See the `Babel User +Guide `_ +for more information. + +.. _babel: http://babel.edgewall.org/ + + +API Reference +============= + +.. autoclass:: mako.template.Template + :show-inheritance: + :members: + +.. autoclass:: mako.template.DefTemplate + :show-inheritance: + :members: + +.. autoclass:: mako.lookup.TemplateCollection + :show-inheritance: + :members: + +.. autoclass:: mako.lookup.TemplateLookup + :show-inheritance: + :members: + +.. autoclass:: mako.exceptions.RichTraceback + :show-inheritance: + + .. py:attribute:: error + + the exception instance. + + .. py:attribute:: message + + the exception error message as unicode. + + .. py:attribute:: source + + source code of the file where the error occurred. + If the error occurred within a compiled template, + this is the template source. + + .. py:attribute:: lineno + + line number where the error occurred. If the error + occurred within a compiled template, the line number + is adjusted to that of the template source. + + .. py:attribute:: records + + a list of 8-tuples containing the original + python traceback elements, plus the + filename, line number, source line, and full template source + for the traceline mapped back to its originating source + template, if any for that traceline (else the fields are ``None``). + + .. py:attribute:: reverse_records + + the list of records in reverse + traceback -- a list of 4-tuples, in the same format as a regular + python traceback, with template-corresponding + traceback records replacing the originals. + + .. py:attribute:: reverse_traceback + + the traceback list in reverse. + +.. autofunction:: mako.exceptions.html_error_template + +.. autofunction:: mako.exceptions.text_error_template + diff --git a/lib3/Mako-0.7.3/doc/caching.html b/lib3/Mako-0.7.3/doc/caching.html new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/caching.html @@ -0,0 +1,779 @@ + + + + + + + Caching + — + Mako 0.7.3 Documentation + + + + + + + + + + + + + + + + + + + +
      +
      + + +
      + Hyperfast and lightweight templating for the Python platform. +
      + + +
      + + + +
      + + + + + + + + + + + +
      + + + +
      +

      Mako 0.7.3 Documentation

      + + + +
      + Release: 0.7.3 + +
      + +
      + +
      + + +
+ Mako 0.7.3 Documentation + » + Caching + + +

      + + Caching + +

      +
      + +
      + +
      + + + +
      + +
      +

Caching

      +

      Any template or component can be cached using the cache +argument to the <%page>, <%def> or <%block> directives:

      +
      <%page cached="True"/>
      +
      +template text
      +
      +
      +

      The above template, after being executed the first time, will +store its content within a cache that by default is scoped +within memory. Subsequent calls to the template’s render() +method will return content directly from the cache. When the +Template object itself falls out of scope, its corresponding +cache is garbage collected along with the template.

      +

By default, caching requires that the Beaker package be installed on the +system; however, the mechanism of caching can be customized to use +any third-party or user-defined system – see Cache Plugins.

      +

      In addition to being available on the <%page> tag, the caching flag and all +its options can be used with the <%def> tag as well:

      +
      <%def name="mycomp" cached="True" cache_timeout="60">
      +    other text
      +</%def>
      +
      +
      +

      ... and equivalently with the <%block> tag, anonymous or named:

      +
      <%block cached="True" cache_timeout="60">
      +    other text
      +</%block>
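As a rough illustration of this behavior (an editor's sketch, not part of the patch, and assuming the default Beaker backend is installed), the second render below is returned from the cache rather than re-evaluated:

    from mako.template import Template

    t = Template(
        '<%page cached="True" cache_timeout="60"/>\n'
        '<%! import time %>\n'
        'rendered at ${time.time()}\n'
    )

    first = t.render()
    second = t.render()   # served from the cache until the 60 second timeout expires
    assert first == second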
      +
      +
      +
      +

Cache Arguments

      +

Mako has two cache arguments that are available on tags in +all cases. The rest of the available arguments +are specific to a backend.

      +

The two generic tag arguments are:

      +
        +
      • cached="True" - enable caching for this <%page>, +<%def>, or <%block>.

        +
      • +
      • cache_key - the “key” used to uniquely identify this content +in the cache. Usually, this key is chosen automatically +based on the name of the rendering callable (i.e. body +when used in <%page>, the name of the def when using <%def>, +the explicit or internally-generated name when using <%block>). +Using the cache_key parameter, the key can be overridden +using a fixed or programmatically generated value.

        +

        For example, here’s a page +that caches any page which inherits from it, based on the +filename of the calling template:

        +
        <%page cached="True" cache_key="${self.filename}"/>
        +
        +${next.body()}
        +
        +## rest of template
        +
        +
        +
      • +
      +

      On a Template or TemplateLookup, the +caching can be configured using these arguments:

      +
        +
      • cache_enabled - Setting this +to False will disable all caching functionality +when the template renders. Defaults to True. +e.g.:

        +
        lookup = TemplateLookup(
        +                directories='/path/to/templates',
        +                cache_enabled = False
        +                )
        +
        +
        +
      • +
      • cache_impl - The string name of the cache backend +to use. This defaults to 'beaker', which has historically +been the only cache backend supported by Mako.

        +

        +New in version 0.6.0.

        +

        For example, here’s how to use the upcoming +dogpile.cache +backend:

        +
        lookup = TemplateLookup(
        +                directories='/path/to/templates',
        +                cache_impl = 'dogpile.cache',
        +                cache_args = {'regions':my_dogpile_regions}
        +                )
        +
        +
        +
      • +
      • cache_args - A dictionary of cache parameters that +will be consumed by the cache backend. See +Using the Beaker Cache Backend for examples.

        +

        +New in version 0.6.0.

        +
      • +
      +
      +

Backend-Specific Cache Arguments

      +

      The <%page>, <%def>, and <%block> tags +accept any named argument that starts with the prefix "cache_". +Those arguments are then packaged up and passed along to the +underlying caching implementation, minus the "cache_" prefix.

      +

      The actual arguments understood are determined by the backend.

      + +
      +
      +

Using the Beaker Cache Backend

      +

When using Beaker, new implementations will want to make use +of cache regions so that cache configurations can be maintained +externally to templates. These configurations live under +named “regions” that can be referred to within templates themselves.

      +

      +New in version 0.6.0: Support for Beaker cache regions.

      +

      For example, suppose we would like two regions. One is a “short term” +region that will store content in a memory-based dictionary, +expiring after 60 seconds. The other is a Memcached region, +where values should expire in five minutes. To configure +our TemplateLookup, first we get a handle to a +beaker.cache.CacheManager:

      +
      from beaker.cache import CacheManager
      +
      +manager = CacheManager(cache_regions={
      +    'short_term':{
      +        'type': 'memory',
      +        'expire': 60
      +    },
      +    'long_term':{
      +        'type': 'ext:memcached',
      +        'url': '127.0.0.1:11211',
      +        'expire': 300
      +    }
      +})
      +
      +lookup = TemplateLookup(
      +                directories=['/path/to/templates'],
      +                module_directory='/path/to/modules',
      +                cache_impl='beaker',
      +                cache_args={
      +                    'manager':manager
      +                }
      +        )
      +
      +
      +

      Our templates can then opt to cache data in one of either region, +using the cache_region argument. Such as using short_term +at the <%page> level:

      +
      <%page cached="True" cache_region="short_term">
      +
      +## ...
      +
      +
      +

      Or, long_term at the <%block> level:

      +
      <%block name="header" cached="True" cache_region="long_term">
      +    other text
      +</%block>
      +
      +
      +

      The Beaker backend also works without regions. There are a +variety of arguments that can be passed to the cache_args +dictionary, which are also allowable in templates via the +<%page>, <%block>, +and <%def> tags specific to those sections. The values +given override those specified at the TemplateLookup +or Template level.

      +

      With the possible exception +of cache_timeout, these arguments are probably better off +staying at the template configuration level. Each argument +specified as cache_XYZ in a template tag is specified +without the cache_ prefix in the cache_args dictionary:

      +
        +
      • cache_timeout - number of seconds in which to invalidate the +cached data. After this timeout, the content is re-generated +on the next call. Available as timeout in the cache_args +dictionary.
      • +
      • cache_type - type of caching. 'memory', 'file', 'dbm', or +'ext:memcached' (note that the string memcached is +also accepted by the dogpile.cache Mako plugin, though not by Beaker itself). +Available as type in the cache_args dictionary.
      • +
• cache_url - (only used for memcached but required) a single +IP address or a semi-colon separated list of IP addresses of +memcache servers to use. Available as url in the cache_args +dictionary.
      • +
      • cache_dir - in the case of the 'file' and 'dbm' cache types, +this is the filesystem directory with which to store data +files. If this option is not present, the value of +module_directory is used (i.e. the directory where compiled +template modules are stored). If neither option is available +an exception is thrown. Available as dir in the +cache_args dictionary.
      • +
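The same names, minus the cache_ prefix, can also be supplied once at the lookup level for a region-less setup. A sketch (not from the patch; the paths are illustrative), assuming Beaker is installed:

    from mako.lookup import TemplateLookup

    # file-based caching for every template section that opts in with cached="True"
    lookup = TemplateLookup(
        directories=['/path/to/templates'],
        cache_impl='beaker',
        cache_args={'type': 'file', 'dir': '/tmp/mako_cache', 'timeout': 300},
    )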
      +
      +
      +

Using the dogpile.cache Backend

      +

      dogpile.cache is a new replacement for Beaker. It provides +a modernized, slimmed down interface and is generally easier to use +than Beaker. As of this writing it has not yet been released. dogpile.cache +includes its own Mako cache plugin – see dogpile.cache.plugins.mako_cache in the +dogpile.cache documentation.

      +
      +
      +
      +

Programmatic Cache Access

      +

The Template, as well as any template-derived Namespace, has +an accessor called cache which returns the Cache object +for that template. This object is a facade on top of the underlying +CacheImpl object, and provides some very rudimentary +capabilities, such as the ability to get and put arbitrary +values:

      +
      <%
+    local.cache.set("somekey", "somevalue", type="memory")
      +%>
      +
      +
      +

      Above, the cache associated with the local namespace is +accessed and a key is placed within a memory cache.

      +

      More commonly, the cache object is used to invalidate cached +sections programmatically:

      +
      template = lookup.get_template('/sometemplate.html')
      +
      +# invalidate the "body" of the template
      +template.cache.invalidate_body()
      +
      +# invalidate an individual def
      +template.cache.invalidate_def('somedef')
      +
      +# invalidate an arbitrary key
      +template.cache.invalidate('somekey')
      +
      +
      +

      You can access any special method or attribute of the CacheImpl +itself using the impl attribute:

      +
      template.cache.impl.do_something_special()
      +
      +
      +

      Note that using implementation-specific methods will mean you can’t +swap in a different kind of CacheImpl implementation at a +later time.

      +
      +
      +

Cache Plugins

      +

The mechanism used by caching can be plugged in +using a CacheImpl subclass. This class implements +the rudimentary methods Mako needs to implement the caching +API. Mako includes the BeakerCacheImpl class to +provide the default implementation. A CacheImpl class +is acquired by Mako using a pkg_resources entry point, using +the name given as the cache_impl argument to Template +or TemplateLookup. This entry point can be +installed via the standard setuptools/setup() procedure, underneath +the EntryPoint group named "mako.cache". It can also be +installed at runtime via a convenience installer register_plugin() +which accomplishes essentially the same task.

      +
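
      For the setuptools route, the registration is an ordinary entry point declaration; the package and module names below are hypothetical, and SimpleCacheImpl refers to the class shown in the next example:

      # setup.py for a hypothetical distribution "mypackage" that ships a
      # Mako cache plugin.  The entry point group "mako.cache" is the one
      # named above; "simple" then becomes a valid value for cache_impl.
      from setuptools import setup

      setup(
          name='mypackage',
          version='0.1',
          packages=['mypackage'],
          entry_points={
              'mako.cache': [
                  'simple = mypackage.cacheimpl:SimpleCacheImpl',
              ],
          },
      )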

      An example plugin that implements a local dictionary cache:

      from mako.cache import CacheImpl, register_plugin

      class SimpleCacheImpl(CacheImpl):
          def __init__(self, cache):
              super(SimpleCacheImpl, self).__init__(cache)
              self._cache = {}

          def get_or_create(self, key, creation_function, **kw):
              if key in self._cache:
                  return self._cache[key]
              else:
                  self._cache[key] = value = creation_function()
                  return value

          def set(self, key, value, **kwargs):
              self._cache[key] = value

          def get(self, key, **kwargs):
              return self._cache.get(key)

          def invalidate(self, key, **kwargs):
              self._cache.pop(key, None)

      # optional - register the class locally
      register_plugin("simple", __name__, "SimpleCacheImpl")

      Enabling the above plugin in a template would look like:

      t = Template(filename="mytemplate.html",
                   cache_impl='simple')

      Guidelines for Writing Cache Plugins

      • The CacheImpl is created on a per-Template basis. The class should ensure that only data for the parent Template is persisted or returned by the cache methods. The actual Template is available via the self.cache.template attribute. The self.cache.id attribute, which is essentially the unique modulename of the template, is a good value to use in order to represent a unique namespace of keys specific to the template.
      • Templates only use the CacheImpl.get_or_create() method in an implicit fashion. The CacheImpl.set(), CacheImpl.get(), and CacheImpl.invalidate() methods are only used in response to direct programmatic access to the corresponding methods on the Cache object.
      • CacheImpl will be accessed in a multithreaded fashion if the Template itself is used multithreaded. Care should be taken to ensure caching implementations are threadsafe (a minimal locking sketch follows this list).
      • A library like Dogpile, which is a minimal locking system derived from Beaker, can be used to help implement the CacheImpl.get_or_create() method in a threadsafe way that can maximize effectiveness across multiple threads as well as processes. CacheImpl.get_or_create() is the key method used by templates.
      • All arguments passed to **kw come directly from the parameters inside the <%def>, <%block>, or <%page> tags directly, minus the "cache_" prefix, as strings, with the exception of the argument cache_timeout, which is passed to the plugin as the name timeout with the value converted to an integer. Arguments present in cache_args on Template or TemplateLookup are passed directly, but are superseded by those present in the most specific template tag.
      • The directory where Template places module files can be acquired using the accessor self.cache.template.module_directory. This directory can be a good place to throw cache-related work files, underneath a prefix like _my_cache_work so that name conflicts with generated modules don't occur.
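
      As a rough illustration of the threading and key-namespacing points above (this is a plain lock around a dictionary, not the Dogpile approach, and the class name is made up), a thread-safe get_or_create() might look like:

      import threading

      from mako.cache import CacheImpl

      class LockingDictCacheImpl(CacheImpl):
          """Illustrative sketch only: a dictionary cache whose get_or_create()
          is serialized with a single lock.  Keys are prefixed with
          self.cache.id so that several templates sharing one store would not
          collide."""

          def __init__(self, cache):
              super(LockingDictCacheImpl, self).__init__(cache)
              self._data = {}
              self._lock = threading.Lock()

          def _namespaced(self, key):
              return "%s|%s" % (self.cache.id, key)

          def get_or_create(self, key, creation_function, **kw):
              key = self._namespaced(key)
              # Holding the lock while calling creation_function() is simple
              # but serializes creation of all keys; Dogpile-style per-key
              # locking avoids that.
              with self._lock:
                  if key not in self._data:
                      self._data[key] = creation_function()
                  return self._data[key]

          def set(self, key, value, **kw):
              self._data[self._namespaced(key)] = value

          def get(self, key, **kw):
              return self._data.get(self._namespaced(key))

          def invalidate(self, key, **kw):
              self._data.pop(self._namespaced(key), None)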

      API Reference

      class mako.cache.Cache(template, *args)

          Bases: object

          Represents a data content cache made available to the module space of a specific Template object.

          New in version 0.6: Cache by itself is mostly a container for a CacheImpl object, which implements a fixed API to provide caching services; specific subclasses exist to implement different caching strategies. Mako includes a backend that works with the Beaker caching system. Beaker itself then supports a number of backends (i.e. file, memory, memcached, etc.)

          The construction of a Cache is part of the mechanics of a Template, and programmatic access to this cache is typically via the Template.cache attribute.

          get(key, **kw)

              Retrieve a value from the cache.

              Parameters:
              • key - the value's key.
              • **kw - cache configuration arguments. The backend is configured using these arguments upon first request. Subsequent requests that use the same series of configuration values will use that same backend.

          get_or_create(key, creation_function, **kw)

              Retrieve a value from the cache, using the given creation function to generate a new value.

          id = None

              Return the 'id' that identifies this cache.

              This is a value that should be globally unique to the Template associated with this cache, and can be used by a caching system to name a local container for data specific to this template.

          impl = None

              Provide the CacheImpl in use by this Cache.

              This accessor allows a CacheImpl with additional methods beyond that of Cache to be used programmatically.

          invalidate(key, **kw)

              Invalidate a value in the cache.

              Parameters:
              • key - the value's key.
              • **kw - cache configuration arguments. The backend is configured using these arguments upon first request. Subsequent requests that use the same series of configuration values will use that same backend.

          invalidate_body()

              Invalidate the cached content of the "body" method for this template.

          invalidate_closure(name)

              Invalidate a nested <%def> within this template.

              Caching of nested defs is a blunt tool as there is no management of scope: nested defs that use cache tags need to have names unique of all other nested defs in the template, else their content will be overwritten by each other.

          invalidate_def(name)

              Invalidate the cached content of a particular <%def> within this template.

          put(key, value, **kw)

              A synonym for Cache.set().

              This is here for backwards compatibility.

          set(key, value, **kw)

              Place a value in the cache.

              Parameters:
              • key - the value's key.
              • value - the value.
              • **kw - cache configuration arguments.

          starttime = None

              Epochal time value for when the owning Template was first compiled.

              A cache implementation may wish to invalidate data earlier than this timestamp; this has the effect of the cache for a specific Template starting clean any time the Template is recompiled, such as when the original template file changed on the filesystem.
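
      To make the starttime note above concrete, here is an illustrative-only CacheImpl variant (the class name and storage layout are invented) that discards entries recorded before the owning template was last compiled:

      import time

      from mako.cache import CacheImpl

      class FreshOnRecompileCacheImpl(CacheImpl):
          def __init__(self, cache):
              super(FreshOnRecompileCacheImpl, self).__init__(cache)
              self._data = {}  # key -> (stored_at, value)

          def get_or_create(self, key, creation_function, **kw):
              entry = self._data.get(key)
              if entry is not None:
                  stored_at, value = entry
                  # Treat anything stored before the template was compiled
                  # (Cache.starttime) as stale.
                  if self.cache.starttime is None or stored_at >= self.cache.starttime:
                      return value
              value = creation_function()
              self._data[key] = (time.time(), value)
              return value
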
      class mako.cache.CacheImpl(cache)

          Bases: object

          Provide a cache implementation for use by Cache.

          get(key, **kw)

              Retrieve a value from the cache.

              Parameters:
              • key - the value's key.
              • **kw - cache configuration arguments.

          get_or_create(key, creation_function, **kw)

              Retrieve a value from the cache, using the given creation function to generate a new value.

              This function must return a value, either from the cache, or via the given creation function. If the creation function is called, the newly created value should be populated into the cache under the given key before being returned.

              Parameters:
              • key - the value's key.
              • creation_function - function that when called generates a new value.
              • **kw - cache configuration arguments.

          invalidate(key, **kw)

              Invalidate a value in the cache.

              Parameters:
              • key - the value's key.
              • **kw - cache configuration arguments.

          pass_context = False

              If True, the Context will be passed to get_or_create as the name 'context'.
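
      A sketch of what pass_context enables (the class, its storage, and the 'user' context variable are all invented for illustration):

      from mako.cache import CacheImpl

      class ContextAwareCacheImpl(CacheImpl):
          # With pass_context = True, Mako hands the rendering Context to
          # get_or_create() under the keyword name 'context'.
          pass_context = True

          def __init__(self, cache):
              super(ContextAwareCacheImpl, self).__init__(cache)
              self._data = {}

          def get_or_create(self, key, creation_function, context=None, **kw):
              # 'context' is the mako.runtime.Context of the current render; a
              # plugin could use it to vary the key, e.g. per-user caching.
              # The 'user' variable below is purely hypothetical.
              if context is not None:
                  key = "%s|%s" % (key, context.get('user', 'anonymous'))
              if key not in self._data:
                  self._data[key] = creation_function()
              return self._data[key]
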
          set(key, value, **kw)

              Place a value in the cache.

              Parameters:
              • key - the value's key.
              • value - the value.
              • **kw - cache configuration arguments.

      mako.cache.register_plugin(name, modulepath, objname)

      class mako.ext.beaker_cache.BeakerCacheImpl(cache)

          Bases: mako.cache.CacheImpl

          A CacheImpl provided for the Beaker caching system.

          This plugin is used by default, based on the default value of 'beaker' for the cache_impl parameter of the Template or TemplateLookup classes.

diff --git a/lib3/Mako-0.7.3/doc/defs.html b/lib3/Mako-0.7.3/doc/defs.html
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/defs.html
@@ -0,0 +1,728 @@

      Defs and Blocks

      <%def> and <%block> are two tags that both demarcate any block of text and/or code. They both exist within generated Python as a callable function, i.e., a Python def. They differ in their scope and calling semantics. Whereas <%def> provides a construct that is very much like a named Python def, the <%block> is more layout oriented.

      +
      +

      Using Defs

      The <%def> tag requires a name attribute, where the name references a Python function signature:

      <%def name="hello()">
          hello world
      </%def>

      To invoke the <%def>, it is normally called as an expression:

      +
      the def:  ${hello()}
      +
      +
      +

      If the <%def> is not nested inside of another <%def>, +it’s known as a top level def and can be accessed anywhere in +the template, including above where it was defined.

      +

      All defs, top level or not, have access to the current +contextual namespace in exactly the same way their containing +template does. Suppose the template below is executed with the +variables username and accountdata inside the context:

      +
      Hello there ${username}, how are ya.  Lets see what your account says:
      +
      +${account()}
      +
      +<%def name="account()">
      +    Account for ${username}:<br/>
      +
      +    % for row in accountdata:
      +        Value: ${row}<br/>
      +    % endfor
      +</%def>
      +
      +
      +

      The username and accountdata variables are present +within the main template body as well as the body of the +account() def.

      +

      Since defs are just Python functions, you can define and pass +arguments to them as well:

      +
      ${account(accountname='john')}
      +
      +<%def name="account(accountname, type='regular')">
      +    account name: ${accountname}, type: ${type}
      +</%def>
      +
      +
      +

      When you declare an argument signature for your def, they are +required to follow normal Python conventions (i.e., all +arguments are required except keyword arguments with a default +value). This is in contrast to using context-level variables, +which evaluate to UNDEFINED if you reference a name that +does not exist.

      +
      +

      Calling Defs from Other Files

      Top level <%def>s are exported by your template's module, and can be called from the outside; including from other templates, as well as normal Python code. Calling a <%def> from another template is something like using an <%include>, except you are calling a specific function within the template, not the whole template.

      +

      The remote <%def> call is also a little bit like calling +functions from other modules in Python. There is an “import” +step to pull the names from another template into your own +template; then the function or functions are available.

      +

      To import another template, use the <%namespace> tag:

      +
      <%namespace name="mystuff" file="mystuff.html"/>
      +
      +
      +

      The above tag adds a local variable mystuff to the current +scope.

      +

      Then, just call the defs off of mystuff:

      +
      ${mystuff.somedef(x=5,y=7)}
      +
      +
      +

      The <%namespace> tag also supports some of the other +semantics of Python’s import statement, including pulling +names into the local variable space, or using * to represent +all names, using the import attribute:

      +
      <%namespace file="mystuff.html" import="foo, bar"/>
      +
      +
      +

      This is just a quick intro to the concept of a namespace, +which is a central Mako concept that has its own chapter in +these docs. For more detail and examples, see +Namespaces.

      +
      +
      +

      Calling Defs Programmatically

      You can call defs programmatically from any Template object using the get_def() method, which returns a DefTemplate object. This is a Template subclass which the parent Template creates, and is usable like any other template:

      from mako.template import Template

      template = Template("""
          <%def name="hi(name)">
              hi ${name}!
          </%def>

          <%def name="bye(name)">
              bye ${name}!
          </%def>
      """)

      print(template.get_def("hi").render(name="ed"))
      print(template.get_def("bye").render(name="ed"))

      Defs within Defs

      The def model follows regular Python rules for closures. Declaring <%def> inside another <%def> declares it within the parent's enclosing scope:

      +
      <%def name="mydef()">
      +    <%def name="subdef()">
      +        a sub def
      +    </%def>
      +
      +    i'm the def, and the subcomponent is ${subdef()}
      +</%def>
      +
      +
      +

      Just like Python, names that exist outside the inner <%def> +exist inside it as well:

      +
      <%
      +    x = 12
      +%>
      +<%def name="outer()">
      +    <%
      +        y = 15
      +    %>
      +    <%def name="inner()">
      +        inner, x is ${x}, y is ${y}
      +    </%def>
      +
      +    outer, x is ${x}, y is ${y}
      +</%def>
      +
      +
      +

      Assigning to a name inside of a def declares that name as local +to the scope of that def (again, like Python itself). This means +the following code will raise an error:

      +
      <%
      +    x = 10
      +%>
      +<%def name="somedef()">
      +    ## error !
      +    somedef, x is ${x}
      +    <%
      +        x = 27
      +    %>
      +</%def>
      +
      +
      +

      ...because the assignment to x declares x as local to the +scope of somedef, rendering the “outer” version unreachable +in the expression that tries to render it.

      +
      +
      +

      Calling a Def with Embedded Content and/or Other Defs

      A flip-side to def within def is a def call with content. This is where you call a def, and at the same time declare a block of content (or multiple blocks) that can be used by the def being called. The main point of such a call is to create custom, nestable tags, just like any other template language's custom-tag creation system, where the external tag controls the execution of the nested tags and can communicate state to them. Only with Mako, you don't have to use any external Python modules, you can define arbitrarily nestable tags right in your templates.

      To achieve this, the target def is invoked using the form <%namespacename:defname> instead of the normal ${} syntax. This syntax, introduced in Mako 0.2.3, is functionally equivalent to another tag known as %call, which takes the form <%call expr='namespacename.defname(args)'>. While %call is available in all versions of Mako, the newer style is probably more familiar looking. The namespace portion of the call is the name of the namespace in which the def is defined; in the most simple cases, this can be local or self to reference the current template's namespace (the difference between local and self is one of inheritance; see Built-in Namespaces for details).

      +

      When the target def is invoked, a variable caller is placed +in its context which contains another namespace containing the +body and other defs defined by the caller. The body itself is +referenced by the method body(). Below, we build a %def +that operates upon caller.body() to invoke the body of the +custom tag:

      +
      <%def name="buildtable()">
      +    <table>
      +        <tr><td>
      +            ${caller.body()}
      +        </td></tr>
      +    </table>
      +</%def>
      +
      +<%self:buildtable>
      +    I am the table body.
      +</%self:buildtable>
      +
      +
      +

      This produces the output (whitespace formatted):

      +
      <table>
      +    <tr><td>
      +        I am the table body.
      +    </td></tr>
      +</table>
      +
      +
      +

      Using the older %call syntax looks like:

      +
      <%def name="buildtable()">
      +    <table>
      +        <tr><td>
      +            ${caller.body()}
      +        </td></tr>
      +    </table>
      +</%def>
      +
      +<%call expr="buildtable()">
      +    I am the table body.
      +</%call>
      +
      +
      +

      The body() can be executed multiple times or not at all. +This means you can use def-call-with-content to build iterators, +conditionals, etc:

      +
      <%def name="lister(count)">
      +    % for x in range(count):
      +        ${caller.body()}
      +    % endfor
      +</%def>
      +
      +<%self:lister count="${3}">
      +    hi
      +</%self:lister>
      +
      +
      +

      Produces:

      +
      hi
      +hi
      +hi
      +
      +
      +

      Notice above we pass 3 as a Python expression, so that it +remains as an integer.

      +

      A custom “conditional” tag:

      +
      <%def name="conditional(expression)">
      +    % if expression:
      +        ${caller.body()}
      +    % endif
      +</%def>
      +
      +<%self:conditional expression="${4==4}">
      +    i'm the result
      +</%self:conditional>
      +
      +
      +

      Produces:

      +
      i'm the result
      +
      +
      +

      But that’s not all. The body() function also can handle +arguments, which will augment the local namespace of the body +callable. The caller must define the arguments which it expects +to receive from its target def using the args attribute, +which is a comma-separated list of argument names. Below, our +<%def> calls the body() of its caller, passing in an +element of data from its argument:

      +
      <%def name="layoutdata(somedata)">
      +    <table>
      +    % for item in somedata:
      +        <tr>
      +        % for col in item:
      +            <td>${caller.body(col=col)}</td>
      +        % endfor
      +        </tr>
      +    % endfor
      +    </table>
      +</%def>
      +
      +<%self:layoutdata somedata="${[[1,2,3],[4,5,6],[7,8,9]]}" args="col">\
      +Body data: ${col}\
      +</%self:layoutdata>
      +
      +
      +

      Produces:

      +
      <table>
      +    <tr>
      +        <td>Body data: 1</td>
      +        <td>Body data: 2</td>
      +        <td>Body data: 3</td>
      +    </tr>
      +    <tr>
      +        <td>Body data: 4</td>
      +        <td>Body data: 5</td>
      +        <td>Body data: 6</td>
      +    </tr>
      +    <tr>
      +        <td>Body data: 7</td>
      +        <td>Body data: 8</td>
      +        <td>Body data: 9</td>
      +    </tr>
      +</table>
      +
      +
      +

      You don’t have to stick to calling just the body() function. +The caller can define any number of callables, allowing the +<%call> tag to produce whole layouts:

      +
      <%def name="layout()">
      +    ## a layout def
      +    <div class="mainlayout">
      +        <div class="header">
      +            ${caller.header()}
      +        </div>
      +
      +        <div class="sidebar">
      +            ${caller.sidebar()}
      +        </div>
      +
      +        <div class="content">
      +            ${caller.body()}
      +        </div>
      +    </div>
      +</%def>
      +
      +## calls the layout def
      +<%self:layout>
      +    <%def name="header()">
      +        I am the header
      +    </%def>
      +    <%def name="sidebar()">
      +        <ul>
      +            <li>sidebar 1</li>
      +            <li>sidebar 2</li>
      +        </ul>
      +    </%def>
      +
      +        this is the body
      +</%self:layout>
      +
      +
      +

      The above layout would produce:

      +
      <div class="mainlayout">
      +    <div class="header">
      +    I am the header
      +    </div>
      +
      +    <div class="sidebar">
      +    <ul>
      +        <li>sidebar 1</li>
      +        <li>sidebar 2</li>
      +    </ul>
      +    </div>
      +
      +    <div class="content">
      +    this is the body
      +    </div>
      +</div>
      +
      +
      +

      The number of things you can do with <%call> and/or the +<%namespacename:defname> calling syntax is enormous. You can +create form widget libraries, such as an enclosing <FORM> +tag and nested HTML input elements, or portable wrapping schemes +using <div> or other elements. You can create tags that +interpret rows of data, such as from a database, providing the +individual columns of each row to a body() callable which +lays out the row any way it wants. Basically anything you’d do +with a “custom tag” or tag library in some other system, Mako +provides via <%def> tags and plain Python callables which are +invoked via <%namespacename:defname> or <%call>.

      +
      +
      +
      +

      Using Blocks

      The <%block> tag introduces some new twists on the <%def> tag which make it more closely tailored towards layout.

      New in version 0.4.1.

      +

      An example of a block:

      +
      <html>
      +    <body>
      +        <%block>
      +            this is a block.
      +        </%block>
      +    </body>
      +</html>
      +
      +
      +

      In the above example, we define a simple block. The block renders its content in the place +that it’s defined. Since the block is called for us, it doesn’t need a name and the above +is referred to as an anonymous block. So the output of the above template will be:

      +
      <html>
      +    <body>
      +            this is a block.
      +    </body>
      +</html>
      +
      +
      +

      So in fact the above block has absolutely no effect. Its usefulness comes when we start +using modifiers. Such as, we can apply a filter to our block:

      +
      <html>
      +    <body>
      +        <%block filter="h">
      +            <html>this is some escaped html.</html>
      +        </%block>
      +    </body>
      +</html>
      +
      +
      +

      or perhaps a caching directive:

      +
      <html>
      +    <body>
      +        <%block cached="True" cache_timeout="60">
      +            This content will be cached for 60 seconds.
      +        </%block>
      +    </body>
      +</html>
      +
      +
      +

      Blocks also work in iterations, conditionals, just like defs:

      +
      % if some_condition:
      +    <%block>condition is met</%block>
      +% endif
      +
      +
      +

      While the block renders at the point it is defined in the template, +the underlying function is present in the generated Python code only +once, so there’s no issue with placing a block inside of a loop or +similar. Anonymous blocks are defined as closures in the local +rendering body, so have access to local variable scope:

      +
      % for i in range(1, 4):
      +    <%block>i is ${i}</%block>
      +% endfor
      +
      +
      +
      +

      Using Named Blocks

      Possibly the more important area where blocks are useful is when we do actually give them names. Named blocks are tailored to behave somewhat closely to Jinja2's block tag, in that they define an area of a layout which can be overridden by an inheriting template. In sharp contrast to the <%def> tag, the name given to a block is global for the entire template regardless of how deeply it's nested:

      +
      <html>
      +<%block name="header">
      +    <head>
      +        <title>
      +            <%block name="title">Title</%block>
      +        </title>
      +    </head>
      +</%block>
      +<body>
      +    ${next.body()}
      +</body>
      +</html>
      +
      +
      +

      The above example has two named blocks “header” and “title”, both of which can be referred to +by an inheriting template. A detailed walkthrough of this usage can be found at Inheritance.

      +

      Note above that named blocks don’t have any argument declaration the way defs do. They still implement themselves +as Python functions, however, so they can be invoked additional times beyond their initial definition:

      +
      <div name="page">
      +    <%block name="pagecontrol">
      +        <a href="">previous page</a> |
      +        <a href="">next page</a>
      +    </%block>
      +
      +    <table>
      +        ## some content
      +    </table>
      +
      +    ${pagecontrol()}
      +</div>
      +
      +
      +

      The content referenced by pagecontrol above will be rendered both above and below the <table> tags.

      +

      To keep things sane, named blocks have restrictions that defs do not:

      • The <%block> declaration cannot have any argument signature.
      • The name of a <%block> can only be defined once in a template; an error is raised if two blocks of the same name occur anywhere in a single template, regardless of nesting. A similar error is raised if a top level def shares the same name as that of a block.
      • A named <%block> cannot be defined within a <%def>, or inside the body of a "call", i.e. a <%call> or <%namespacename:defname> tag. Anonymous blocks can, however.
      +
      +

      Using Page Arguments in Named Blocks

      A named block is very much like a top level def. It has a similar restriction to these types of defs in that arguments passed to the template via the <%page> tag aren't automatically available. Using arguments with the <%page> tag is described in the section The body() Method, and refers to scenarios such as when the body() method of a template is called from an inherited template passing arguments, or the template is invoked from an <%include> tag with arguments. To allow a named block to share the same arguments passed to the page, the args attribute can be used:

      +
      <%page args="post"/>
      +
      +<a name="${post.title}" />
      +
      +<span class="post_prose">
      +    <%block name="post_prose" args="post">
      +        ${post.content}
      +    </%block>
      +</span>
      +
      +
      +

      Where above, if the template is called via a directive like +<%include file="post.mako" args="post=post" />, the post +variable is available both in the main body as well as the +post_prose block.

      +

      Similarly, the **pageargs variable is present, in named blocks only, +for those arguments not explicit in the <%page> tag:

      +
      <%block name="post_prose">
      +    ${pageargs['post'].content}
      +</%block>
      +
      +
      +

      The args attribute is only allowed with named blocks. With +anonymous blocks, the Python function is always rendered in the same +scope as the call itself, so anything available directly outside the +anonymous block is available inside as well.


diff --git a/lib3/Mako-0.7.3/doc/filtering.html b/lib3/Mako-0.7.3/doc/filtering.html
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/filtering.html
@@ -0,0 +1,478 @@


      Filtering and Buffering

      Expression Filtering

      As described in the chapter Syntax, the "|" operator can be applied to a "${}" expression to apply escape filters to the output:

      +
      ${"this is some text" | u}
      +
      +
      +

      The above expression applies URL escaping to the expression, and +produces this+is+some+text.

      +

      The built-in escape flags are:

      • u : URL escaping, provided by urllib.quote_plus(string.encode('utf-8'))
      • h : HTML escaping, provided by markupsafe.escape(string)
        New in version 0.3.4: Prior versions use cgi.escape(string, True).
      • x : XML escaping
      • trim : whitespace trimming, provided by string.strip()
      • entity : produces HTML entity references for applicable strings, derived from htmlentitydefs
      • unicode (str on Python 3): produces a Python unicode string (this function is applied by default)
      • decode.<some encoding> : decode input into a Python unicode with the specified encoding
      • n : disable all default filtering; only filters specified in the local expression tag will be applied.

      To apply more than one filter, separate them by a comma:

      +
      ${" <tag>some value</tag> " | h,trim}
      +
      +
      +

      The above produces &lt;tag&gt;some value&lt;/tag&gt;, with +no leading or trailing whitespace. The HTML escaping function is +applied first, the “trim” function second.

      +

      Naturally, you can make your own filters too. A filter is just a +Python function that accepts a single string argument, and +returns the filtered result. The expressions after the | +operator draw upon the local namespace of the template in which +they appear, meaning you can define escaping functions locally:

      +
      <%!
      +    def myescape(text):
      +        return "<TAG>" + text + "</TAG>"
      +%>
      +
      +Here's some tagged text: ${"text" | myescape}
      +
      +
      +

      Or from any Python module:

      +
      <%!
      +    import myfilters
      +%>
      +
      +Here's some tagged text: ${"text" | myfilters.tagfilter}
      +
      +
      +

      A page can apply a default set of filters to all expression tags +using the expression_filter argument to the %page tag:

      +
      <%page expression_filter="h"/>
      +
      +Escaped text:  ${"<html>some html</html>"}
      +
      +
      +

      Result:

      +
      Escaped text: &lt;html&gt;some html&lt;/html&gt;
      +
      +
      +
      +

      The default_filters Argument

      In addition to the expression_filter argument, the default_filters argument to both Template and TemplateLookup can specify filtering for all expression tags at the programmatic level. This array-based argument, when given its default argument of None, will be internally set to ["unicode"] (or ["str"] on Python 3), except when disable_unicode=True is set in which case it defaults to ["str"]:

      +
      t = TemplateLookup(directories=['/tmp'], default_filters=['unicode'])
      +
      +
      +

      To replace the usual unicode/str function with a +specific encoding, the decode filter can be substituted:

      +
      t = TemplateLookup(directories=['/tmp'], default_filters=['decode.utf8'])
      +
      +
      +

      To disable default_filters entirely, set it to an empty +list:

      +
      t = TemplateLookup(directories=['/tmp'], default_filters=[])
      +
      +
      +

      Any string name can be added to default_filters where it +will be added to all expressions as a filter. The filters are +applied from left to right, meaning the leftmost filter is +applied first.

      +
      t = Template(templatetext, default_filters=['unicode', 'myfilter'])
      +
      +
      +

      To ease the usage of default_filters with custom filters, +you can also add imports (or other code) to all templates using +the imports argument:

      +
      t = TemplateLookup(directories=['/tmp'],
      +                   default_filters=['unicode', 'myfilter'],
      +                   imports=['from mypackage import myfilter'])
      +
      +
      +

      The above will generate templates something like this:

      +
      # ....
      +from mypackage import myfilter
      +
      +def render_body(context):
      +    context.write(myfilter(unicode("some text")))
      +
      +
      +
      +
      +

      Turning off Filtering with the n Filter

      In all cases the special n filter, used locally within an expression, will disable all filters declared in the <%page> tag as well as in default_filters. Such as:

      +
      ${'myexpression' | n}
      +
      +
      +

      will render myexpression with no filtering of any kind, and:

      +
      ${'myexpression' | n,trim}
      +
      +
      +

      will render myexpression using the trim filter only.

      +
      +
      +
      +

      Filtering Defs and Blocks

      The %def and %block tags have an argument called filter which will apply the given list of filter functions to the output of the %def:

      +
      <%def name="foo()" filter="h, trim">
      +    <b>this is bold</b>
      +</%def>
      +
      +
      +

      When the filter attribute is applied to a def as above, the def +is automatically buffered as well. This is described next.

      +
      +
      +

      Buffering

      One of Mako's central design goals is speed. To this end, all of the textual content within a template and its various callables is by default piped directly to the single buffer that is stored within the Context object. While this normally is easy to miss, it has certain side effects. The main one is that when you call a def using the normal expression syntax, i.e. ${somedef()}, it may appear that the return value of the function is the content it produced, which is then delivered to your template just like any other expression substitution, except that normally, this is not the case; the return value of ${somedef()} is simply the empty string ''. By the time you receive this empty string, the output of somedef() has been sent to the underlying buffer.

      +

      You may not want this effect, if for example you are doing +something like this:

      +
      ${" results " + somedef() + " more results "}
      +
      +
      +

      If the somedef() function produced the content “somedef's +results”, the above template would produce this output:

      +
      somedef's results results more results
      +
      +
      +

      This is because somedef() fully executes before the +expression returns the results of its concatenation; the +concatenation in turn receives just the empty string as its +middle expression.

      +

      Mako provides two ways to work around this. One is by applying +buffering to the %def itself:

      +
      <%def name="somedef()" buffered="True">
      +    somedef's results
      +</%def>
      +
      +
      +

      The above definition will generate code similar to this:

      +
      def somedef():
      +    context.push_buffer()
      +    try:
      +        context.write("somedef's results")
      +    finally:
      +        buf = context.pop_buffer()
      +    return buf.getvalue()
      +
      +
      +

      So that the content of somedef() is sent to a second buffer, +which is then popped off the stack and its value returned. The +speed hit inherent in buffering the output of a def is also +apparent.

      +

      Note that the filter argument on %def also causes the def to +be buffered. This is so that the final content of the %def can +be delivered to the escaping function in one batch, which +reduces method calls and also produces more deterministic +behavior for the filtering function itself, which can possibly +be useful for a filtering function that wishes to apply a +transformation to the text as a whole.

      +

      The other way to buffer the output of a def or any Mako callable +is by using the built-in capture function. This function +performs an operation similar to the above buffering operation +except it is specified by the caller.

      +
      ${" results " + capture(somedef) + " more results "}
      +
      +
      +

      Note that the first argument to the capture function is +the function itself, not the result of calling it. This is +because the capture function takes over the job of actually +calling the target function, after setting up a buffered +environment. To send arguments to the function, just send them +to capture instead:

      +
      ${capture(somedef, 17, 'hi', use_paging=True)}
      +
      +
      +

      The above call is equivalent to the unbuffered call:

      +
      ${somedef(17, 'hi', use_paging=True)}
      +
      +
      +
      +
      +

      Decorating

      New in version 0.2.5.

      Somewhat like a filter for a %def but more flexible, the decorator argument to %def allows the creation of a function that will work in a similar manner to a Python decorator. The function can control whether or not the function executes. The original intent of this function is to allow the creation of custom cache logic, but there may be other uses as well.

      decorator is intended to be used with a regular Python function, such as one defined in a library module. Here we'll illustrate the Python function defined in the template for simplicity's sake:

      +
      <%!
      +    def bar(fn):
      +        def decorate(context, *args, **kw):
      +            context.write("BAR")
      +            fn(*args, **kw)
      +            context.write("BAR")
      +            return ''
      +        return decorate
      +%>
      +
      +<%def name="foo()" decorator="bar">
      +    this is foo
      +</%def>
      +
      +${foo()}
      +
      +
      +

      The above template will return, with more whitespace than this, +"BAR this is foo BAR". The function is the render callable +itself (or possibly a wrapper around it), and by default will +write to the context. To capture its output, use the capture() +callable in the mako.runtime module (available in templates +as just runtime):

      +
      <%!
      +    def bar(fn):
      +        def decorate(context, *args, **kw):
      +            return "BAR" + runtime.capture(context, fn, *args, **kw) + "BAR"
      +        return decorate
      +%>
      +
      +<%def name="foo()" decorator="bar">
      +    this is foo
      +</%def>
      +
      +${foo()}
      +
      +
      +

      The decorator can be used with top-level defs as well as nested +defs, and blocks too. Note that when calling a top-level def from the +Template API, i.e. template.get_def('somedef').render(), +the decorator has to write the output to the context, i.e. +as in the first example. The return value gets discarded.

      +
      +
      + +
      + +
      + + + +
      + +
      + +
      + + + +
      +

diff --git a/lib3/Mako-0.7.3/doc/genindex.html b/lib3/Mako-0.7.3/doc/genindex.html
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/genindex.html
@@ -0,0 +1,916 @@


      Index

      A | B | C | D | E | F | G | H | I | K | L | M | N | P | R | S | T | U | W

      [Auto-generated general index of the documented API; its entries cover the classes Cache, CacheImpl, BeakerCacheImpl, Context, LoopContext, Namespace, ModuleNamespace, TemplateNamespace, Template, DefTemplate, TemplateCollection, TemplateLookup, RichTraceback and Undefined, the functions capture(), supports_caller(), register_plugin(), html_error_template() and text_error_template(), and their documented methods and attributes.]

diff --git a/lib3/Mako-0.7.3/doc/index.html b/lib3/Mako-0.7.3/doc/index.html
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/index.html
@@ -0,0 +1,230 @@

      [Mako 0.7.3 Documentation front page: table of contents; no further textual content recovered.]

diff --git a/lib3/Mako-0.7.3/doc/inheritance.html b/lib3/Mako-0.7.3/doc/inheritance.html
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/inheritance.html
@@ -0,0 +1,673 @@


      Inheritance

      Note

      Most of the inheritance examples here take advantage of a feature that's new in Mako as of version 0.4.1 called the "block". This tag is very similar to the "def" tag but is more streamlined for usage with inheritance. Note that all of the examples here which use blocks can also use defs instead. Contrasting usages will be illustrated.

      Using template inheritance, two or more templates can organize themselves into an inheritance chain, where content and functions from all involved templates can be intermixed. The general paradigm of template inheritance is this: if a template A inherits from template B, then template A agrees to send the executional control to template B at runtime (A is called the inheriting template). Template B, the inherited template, then makes decisions as to what resources from A shall be executed.

      +

      In practice, it looks like this. Here’s a hypothetical inheriting +template, index.html:

      +
      ## index.html
      +<%inherit file="base.html"/>
      +
      +<%block name="header">
      +    this is some header content
      +</%block>
      +
      +this is the body content.
      +
      +
      +

      And base.html, the inherited template:

      +
      ## base.html
      +<html>
      +    <body>
      +        <div class="header">
      +            <%block name="header"/>
      +        </div>
      +
      +        ${self.body()}
      +
      +        <div class="footer">
      +            <%block name="footer">
      +                this is the footer
      +            </%block>
      +        </div>
      +    </body>
      +</html>
      +
      +
      +

      Here is a breakdown of the execution:

      1. When index.html is rendered, control immediately passes to base.html.
      2. base.html then renders the top part of an HTML document, then invokes the <%block name="header"> block. It invokes the underlying header() function off of a built-in namespace called self (this namespace was first introduced in the Namespaces chapter). Since index.html is the topmost template and also defines a block called header, it's this header block that ultimately gets executed, instead of the one that's present in base.html.
      3. Control comes back to base.html. Some more HTML is rendered.
      4. base.html executes self.body(). The body() function on all template-based namespaces refers to the main body of the template, therefore the main body of index.html is rendered.
      5. When <%block name="header"> is encountered in index.html during the self.body() call, a conditional is checked: does the current inherited template, i.e. base.html, also define this block? If yes, the <%block> is not executed here; the inheritance mechanism knows that the parent template is responsible for rendering this block (and in fact it already has). In other words a block only renders in its basemost scope.
      6. Control comes back to base.html. More HTML is rendered, then the <%block name="footer"> expression is invoked.
      7. The footer block is only defined in base.html, so being the topmost definition of footer, it's the one that executes. If index.html also specified footer, then its version would override that of the base.
      8. base.html finishes up rendering its HTML and the template is complete, producing:

         <html>
             <body>
                 <div class="header">
                     this is some header content
                 </div>

                 this is the body content.

                 <div class="footer">
                     this is the footer
                 </div>
             </body>
         </html>

      ...and that is template inheritance in a nutshell. The main idea +is that the methods that you call upon self always +correspond to the topmost definition of that method. Very much +the way self works in a Python class, even though Mako is +not actually using Python class inheritance to implement this +functionality. (Mako doesn’t take the “inheritance” metaphor too +seriously; while useful to setup some commonly recognized +semantics, a textual template is not very much like an +object-oriented class construct in practice).

      +
      +

      Nesting Blocks

      The named blocks defined in an inherited template can also be nested within other blocks. The name given to each block is globally accessible via any inheriting template. We can add a new block title to our header block:

      +
      ## base.html
      +<html>
      +    <body>
      +        <div class="header">
      +            <%block name="header">
      +                <h2>
      +                    <%block name="title"/>
      +                </h2>
      +            </%block>
      +        </div>
      +
      +        ${self.body()}
      +
      +        <div class="footer">
      +            <%block name="footer">
      +                this is the footer
      +            </%block>
      +        </div>
      +    </body>
      +</html>
      +
      +
      +

      The inheriting template can name either or both of header and title, separately +or nested themselves:

      +
      ## index.html
      +<%inherit file="base.html"/>
      +
      +<%block name="header">
      +    this is some header content
      +    ${parent.header()}
      +</%block>
      +
      +<%block name="title">
      +    this is the title
      +</%block>
      +
      +this is the body content.
      +
      +
      +

      Note when we overrode header, we added an extra call ${parent.header()} in order to invoke +the parent’s header block in addition to our own. That’s described in more detail below, +in Using the parent Namespace to Augment Defs.

      +
      +
      +

      Rendering a Named Block Multiple Times

      Recall from the section Using Blocks that a named block is just like a <%def>, with some different usage rules. We can call one of our named sections distinctly, for example a section that is used more than once, such as the title of a page:

      +
      <html>
      +    <head>
      +        <title>${self.title()}</title>
      +    </head>
      +    <body>
      +    <%block name="header">
      +        <h2><%block name="title"/></h2>
      +    </%block>
      +    ${self.body()}
      +    </body>
      +</html>
      +
      +
      +

Where above, an inheriting template can define <%block name="title"> just once, and it will be used in the base template both in the <title> section as well as in the <h2>.
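For instance, an inheriting template might fill in only the title block (a short sketch against the base template shown above; the file name index.html is assumed):

    ## index.html
    <%inherit file="base.html"/>

    <%block name="title">my page title</%block>

    this is the body content.

The single title definition then appears inside both the <title> tag and the <h2>.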

But what about Defs?

The previous example used the <%block> tag to produce areas of content to be overridden. Before Mako 0.4.1, there wasn't any such tag – instead there was only the <%def> tag. As it turns out, named blocks and defs are largely interchangeable. The def simply doesn't call itself automatically, and has more open-ended naming and scoping rules that are more flexible and similar to Python itself, but less suited towards layout. The first example from this chapter using defs would look like:

      +
      ## index.html
      +<%inherit file="base.html"/>
      +
      +<%def name="header()">
      +    this is some header content
      +</%def>
      +
      +this is the body content.
      +
      +
      +

      And base.html, the inherited template:

      +
      ## base.html
      +<html>
      +    <body>
      +        <div class="header">
      +            ${self.header()}
      +        </div>
      +
      +        ${self.body()}
      +
      +        <div class="footer">
      +            ${self.footer()}
      +        </div>
      +    </body>
      +</html>
      +
      +<%def name="header()"/>
      +<%def name="footer()">
      +    this is the footer
      +</%def>
      +
      +
      +

Above, we illustrate that defs differ from blocks in that their definition and invocation are defined in two separate places, instead of at once. You can almost do exactly what a block does if you put the two together:

      +
      <div class="header">
      +    <%def name="header()"></%def>${self.header()}
      +</div>
      +
      +
      +

The <%block> is obviously more streamlined than the <%def> for this kind of usage. In addition, the above "inline" approach with <%def> does not work with nesting:

      +
      <head>
      +    <%def name="header()">
      +        <title>
      +        ## this won't work !
      +        <%def name="title()">default title</%def>${self.title()}
      +        </title>
      +    </%def>${self.header()}
      +</head>
      +
      +
      +

Where above, the title() def, because it's a def within a def, is not part of the template's exported namespace and will not be part of self. If the inherited template did define its own title def at the top level, it would be called, but the "default title" above is not present at all on self no matter what. For this to work as expected you'd instead need to say:

      +
      <head>
      +    <%def name="header()">
      +        <title>
      +        ${self.title()}
      +        </title>
      +    </%def>${self.header()}
      +
      +    <%def name="title()"/>
      +</head>
      +
      +
      +

That is, title is defined outside of any other defs so that it is in the self namespace. It works, but the definition needs to be potentially far away from the point of render.

A named block is always placed in the self namespace, regardless of nesting, so this restriction is lifted:

      +
      ## base.html
      +<head>
      +    <%block name="header">
      +        <title>
      +        <%block name="title"/>
      +        </title>
      +    </%block>
      +</head>
      +
      +
      +

The above template defines title inside of header, and an inheriting template can define one or both in any configuration, nested inside each other or not, in order for them to be used:

      +
      ## index.html
      +<%inherit file="base.html"/>
      +<%block name="title">
      +    the title
      +</%block>
      +<%block name="header">
      +    the header
      +</%block>
      +
      +
      +

So while the <%block> tag lifts the restriction of nested blocks not being available externally, in order to achieve this it adds the restriction that all block names in a single template need to be globally unique within the template, and additionally that a <%block> can't be defined inside of a <%def>. It's a more restricted tag suited towards a more specific use case than <%def>.

Using the next Namespace to Produce Content Wrapping

Sometimes you have an inheritance chain that spans more than two templates. Or maybe you don't, but you'd like to build your system such that extra inherited templates can be inserted in the middle of a chain where they would be smoothly integrated. If each template wants to define its layout just within its main body, you can't just call self.body() to get at the inheriting template's body, since that is only the topmost body. To get at the body of the next template, you call upon the namespace next, which is the namespace of the template immediately following the current template.

      +

Let's change the line in base.html which calls upon self.body() to instead call upon next.body():

      +
      ## base.html
      +<html>
      +    <body>
      +        <div class="header">
      +            <%block name="header"/>
      +        </div>
      +
      +        ${next.body()}
      +
      +        <div class="footer">
      +            <%block name="footer">
      +                this is the footer
      +            </%block>
      +        </div>
      +    </body>
      +</html>
      +
      +
      +

Let's also add an intermediate template called layout.html, which inherits from base.html:

      +
      ## layout.html
      +<%inherit file="base.html"/>
      +<ul>
      +    <%block name="toolbar">
      +        <li>selection 1</li>
      +        <li>selection 2</li>
      +        <li>selection 3</li>
      +    </%block>
      +</ul>
      +<div class="mainlayout">
      +    ${next.body()}
      +</div>
      +
      +
      +

And finally change index.html to inherit from layout.html instead:

      +
      ## index.html
      +<%inherit file="layout.html"/>
      +
      +## .. rest of template
      +
      +
      +

In this setup, each call to next.body() will render the body of the next template in the inheritance chain (which can be written as base.html -> layout.html -> index.html). Control is still first passed to the bottommost template base.html, and self still references the topmost definition of any particular def.

The output we get would be:

      +
      <html>
      +    <body>
      +        <div class="header">
      +            this is some header content
      +        </div>
      +
      +        <ul>
      +            <li>selection 1</li>
      +            <li>selection 2</li>
      +            <li>selection 3</li>
      +        </ul>
      +
      +        <div class="mainlayout">
      +        this is the body content.
      +        </div>
      +
      +        <div class="footer">
      +            this is the footer
      +        </div>
      +    </body>
      +</html>
      +
      +
      +

So above, we have the <html>, <body> and header/footer layout of base.html, we have the <ul> and mainlayout section of layout.html, and the main body of index.html as well as its overridden header def. The layout.html template is inserted into the middle of the chain without base.html having to change anything. Without the next namespace, only the main body of index.html could be used; there would be no way to call layout.html's body content.

Using the parent Namespace to Augment Defs

Let's now look at the other inheritance-specific namespace, the opposite of next, called parent. parent is the namespace of the template immediately preceding the current template. What's useful about this namespace is that defs or blocks can call upon their overridden versions. This is not as hard as it sounds and is very much like using the super keyword in Python. Let's modify index.html to augment the list of selections provided by the toolbar function in layout.html:

      +
      ## index.html
      +<%inherit file="layout.html"/>
      +
      +<%block name="header">
      +    this is some header content
      +</%block>
      +
      +<%block name="toolbar">
      +    ## call the parent's toolbar first
      +    ${parent.toolbar()}
      +    <li>selection 4</li>
      +    <li>selection 5</li>
      +</%block>
      +
      +this is the body content.
      +
      +
      +

Above, we implemented a toolbar() function, which is meant to override the definition of toolbar within the inherited template layout.html. However, since we want the content from that of layout.html as well, we call it via the parent namespace whenever we want its content, in this case before we add our own selections. So the output for the whole thing is now:

      +
      <html>
      +    <body>
      +        <div class="header">
      +            this is some header content
      +        </div>
      +
      +        <ul>
      +            <li>selection 1</li>
      +            <li>selection 2</li>
      +            <li>selection 3</li>
      +            <li>selection 4</li>
      +            <li>selection 5</li>
      +        </ul>
      +
      +        <div class="mainlayout">
      +        this is the body content.
      +        </div>
      +
      +        <div class="footer">
      +            this is the footer
      +        </div>
      +    </body>
      +</html>
      +
      +
      +

and you're now a template inheritance ninja!

Inheritable Attributes

The attr accessor of the Namespace object allows access to module-level variables declared in a template. By accessing self.attr, you can access regular attributes from the inheritance chain as declared in <%! %> sections, such as:

      +
      <%!
      +    class_ = "grey"
      +%>
      +
      +<div class="${self.attr.class_}">
      +    ${self.body()}
      +</div>
      +
      +
      +

If an inheriting template overrides class_ to be "white", as in:

      +
      <%!
      +    class_ = "white"
      +%>
      +<%inherit file="parent.html"/>
      +
      +This is the body
      +
      +
      +

      you’ll get output like:

      +
      <div class="white">
      +    This is the body
      +</div>
      +
      +
      +
      +
      + +
      + +
      + + + +
      + +
      + +
      + + + +
      +
diff --git a/lib3/Mako-0.7.3/doc/namespaces.html b/lib3/Mako-0.7.3/doc/namespaces.html
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/namespaces.html
@@ -0,0 +1,649 @@

Namespaces

Namespaces are used to organize groups of defs into categories, and also to "import" defs from other files.

If the file components.html defines these two defs:

      +
      ## components.html
      +<%def name="comp1()">
      +    this is comp1
      +</%def>
      +
      +<%def name="comp2(x)">
      +    this is comp2, x is ${x}
      +</%def>
      +
      +
      +

you can make another file, for example index.html, that pulls those two defs into a namespace called comp:

      +
      ## index.html
      +<%namespace name="comp" file="components.html"/>
      +
      +Here's comp1:  ${comp.comp1()}
      +Here's comp2:  ${comp.comp2(x=5)}
      +
      +
      +

The comp variable above is an instance of Namespace, a proxy object which delivers method calls to the underlying template callable using the current context.
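On the Python side, a file-based namespace like the one above is typically resolved through a TemplateLookup; a minimal sketch (the directory path is an assumption):

    from mako.lookup import TemplateLookup

    # both index.html and components.html live in the same directory
    lookup = TemplateLookup(directories=['/path/to/templates'])
    print(lookup.get_template('index.html').render())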

<%namespace> also provides an import attribute which can be used to pull the names into the local namespace, removing the need to call them via the "." operator. When import is used, the name attribute is optional.

      +
      <%namespace file="components.html" import="comp1, comp2"/>
      +
      +Heres comp1:  ${comp1()}
      +Heres comp2:  ${comp2(x=5)}
      +
      +
      +

      import also supports the “*” operator:

      +
      <%namespace file="components.html" import="*"/>
      +
      +Heres comp1:  ${comp1()}
      +Heres comp2:  ${comp2(x=5)}
      +
      +
      +

The names imported by the import attribute take precedence over any names that exist within the current context.

Note: in current versions of Mako, usage of import='*' is known to decrease performance of the template. This will be fixed in a future release.

The file argument allows expressions – if looking for context variables, the context must be named explicitly:

      +
      <%namespace name="dyn" file="${context['namespace_name']}"/>
      +
      +
      +
      +

Ways to Call Namespaces

There are essentially four ways to call a function from a namespace.

The "expression" format, as described previously. Namespaces are just Python objects with functions on them, and can be used in expressions like any other function:

      +
      ${mynamespace.somefunction('some arg1', 'some arg2', arg3='some arg3', arg4='some arg4')}
      +
      +
      +

Synonymous with the "expression" format is the "custom tag" format, when a "closed" tag is used. This format, introduced in Mako 0.2.3, allows the usage of a "custom" Mako tag, with the function arguments passed in using named attributes:

      +
      <%mynamespace:somefunction arg1="some arg1" arg2="some arg2" arg3="some arg3" arg4="some arg4"/>
      +
      +
      +

When using tags, the values of the arguments are taken as literal strings by default. To embed Python expressions as arguments, use the embedded expression format:

      +
      <%mynamespace:somefunction arg1="${someobject.format()}" arg2="${somedef(5, 12)}"/>
      +
      +
      +

The "custom tag" format is intended mainly for namespace functions which recognize body content, which in Mako is known as a "def with embedded content":

      +
      <%mynamespace:somefunction arg1="some argument" args="x, y">
      +    Some record: ${x}, ${y}
      +</%mynamespace:somefunction>
      +
      +
      +

      The “classic” way to call defs with embedded content is the <%call> tag:

      +
      <%call expr="mynamespace.somefunction(arg1='some argument')" args="x, y">
      +    Some record: ${x}, ${y}
      +</%call>
      +
      +
      +

For information on how to construct defs that embed content from the caller, see Calling a Def with Embedded Content and/or Other Defs.

Namespaces from Regular Python Modules

Namespaces can also import regular Python functions from modules. These callables need to take at least one argument, context, an instance of Context. A module file some/module.py might contain the callable:

      +
      def my_tag(context):
      +    context.write("hello world")
      +    return ''
      +
      +
      +

      A template can use this module via:

      +
      <%namespace name="hw" module="some.module"/>
      +
      +${hw.my_tag()}
      +
      +
      +

Note that the context argument is not needed in the call; the namespace tag creates a locally-scoped callable which takes care of it. The return '' is so that the def does not dump a None into the output stream – the return value of any def is rendered after the def completes, in addition to whatever was passed to Context.write() within its body.
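Putting the two pieces together, a short Python-side sketch (it assumes the some.module shown above is importable on the Python path):

    from mako.template import Template

    t = Template('<%namespace name="hw" module="some.module"/>${hw.my_tag()}')
    print(t.render())   # output includes "hello world"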

If your def is to be called in an "embedded content" context, that is, as described in Calling a Def with Embedded Content and/or Other Defs, you should use the supports_caller() decorator, which ensures that the correct "caller" variable is available when your def is called, supporting embedded content:

      +
      from mako.runtime import supports_caller
      +
      +@supports_caller
      +def my_tag(context):
      +    context.write("<div>")
      +    context['caller'].body()
      +    context.write("</div>")
      +    return ''
      +
      +
      +

Capturing of output is available as well, using the outside-of-templates version of the capture() function, which accepts the "context" as its first argument:

      +
      from mako.runtime import supports_caller, capture
      +
      +@supports_caller
      +def my_tag(context):
      +    return "<div>%s</div>" % \
      +            capture(context, context['caller'].body, x="foo", y="bar")
      +
      +
      +
      +
      +

Declaring Defs in Namespaces

The <%namespace> tag supports the definition of <%def>s directly inside the tag. These defs become part of the namespace like any other function, and will override the definitions pulled in from a remote template or module:

      +
      ## define a namespace
      +<%namespace name="stuff">
      +    <%def name="comp1()">
      +        comp1
      +    </%def>
      +</%namespace>
      +
      +## then call it
      +${stuff.comp1()}
      +
      +
      +
      +
      +

The body() Method

Every namespace that is generated from a template contains a method called body(). This method corresponds to the main body of the template, and plays its most important roles when using inheritance relationships as well as def-calls-with-content.

Since the body() method is available from a namespace just like all the other defs defined in a template, what happens if you send arguments to it? By default, the body() method accepts no positional arguments, and for usefulness in inheritance scenarios will by default dump all keyword arguments into a dictionary called pageargs. But if you actually want to get at the keyword arguments, Mako recommends you define your own argument signature explicitly. You do this via the <%page> tag:

      +
      <%page args="x, y, someval=8, scope='foo', **kwargs"/>
      +
      +
      +

A template which defines the above signature requires that the variables x and y are defined, defines default values for someval and scope, and sets up **kwargs to receive all other keyword arguments. If **kwargs or similar is not present, the argument **pageargs gets tacked on by Mako. When the template is called as a top-level template (i.e. via render()) or via the <%include> tag, the values for these arguments will be pulled from the Context. In all other cases, i.e. via calling the body() method, the arguments are taken as ordinary arguments from the method call. So above, the body might be called as:

      +
      ${self.body(5, y=10, someval=15, delta=7)}
      +
      +
      +

The Context object also supplies a kwargs accessor, for cases when you'd like to pass along whatever is in the context to a body() callable:

      +
      ${next.body(**context.kwargs)}
      +
      +
      +

The usefulness of calls like the above becomes more apparent when one works with inheriting templates. For more information on this, as well as the meanings of the names self and next, see Inheritance.

Built-in Namespaces

The namespace is so great that Mako gives your template one (or two) for free. The names of these namespaces are local and self. Other built-in namespaces include parent and next, which are optional and are described in Inheritance.

      +
      +

local

The local namespace is basically the namespace for the currently executing template. This means that all of the top-level defs defined in your template, as well as your template's body() function, are also available off of the local namespace.

The local namespace is also where properties like uri, filename, and module, as well as the get_namespace() method, can be particularly useful.
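For example, a template fragment could report on itself (a trivial sketch using the attributes named above):

    ## where did this template come from?
    rendered from: ${local.uri} (file ${local.filename})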

      +
      +
      +

self

The self namespace, in the case of a template that does not use inheritance, is synonymous with local. If inheritance is used, then self references the topmost template in the inheritance chain, where it is most useful for providing the ultimate form of various "method" calls which may have been overridden at various points in an inheritance chain. See Inheritance.

      +
      +
      +
      +

Inheritable Namespaces

The <%namespace> tag includes an optional attribute inheritable="True", which will cause the namespace to be attached to the self namespace. Since self is globally available throughout an inheritance chain (described in the next section), all the templates in an inheritance chain can get at the namespace imported in a super-template via self.

      +
      ## base.html
      +<%namespace name="foo" file="foo.html" inheritable="True"/>
      +
      +${next.body()}
      +
      +## somefile.html
      +<%inherit file="base.html"/>
      +
      +${self.foo.bar()}
      +
      +
      +

This allows a super-template to load a whole bunch of namespaces that its inheriting templates can get to, without them having to explicitly load those namespaces themselves.

The import="*" part of the <%namespace> tag doesn't yet interact with the inheritable flag, so currently you have to use the explicit namespace name off of self, followed by the desired function name. But more on this in a future release.

      +
      +
      +

API Reference

class mako.runtime.Namespace(name, context, callables=None, inherits=None, populate_self=True, calling_uri=None)
      +

      Bases: object

      +

      Provides access to collections of rendering methods, which +can be local, from other templates, or from imported modules.

      +

      To access a particular rendering method referenced by a +Namespace, use plain attribute access:

      +
      ${some_namespace.foo(x, y, z)}
      +
      +
      +

      Namespace also contains several built-in attributes +described here.

      +
      +
      +attr??
      +

      Access module level attributes by name.

      +

      This accessor allows templates to supply “scalar” +attributes which are particularly handy in inheritance +relationships. See the example in +Inheritance.

      +
      + +
      +
      +cache??
      +

      Return the Cache object referenced +by this Namespace object’s +Template.

      +
      + +
      +
      +context = None??
      +

      The Context object for this Namespace.

      +

      Namespaces are often created with copies of contexts that +contain slightly different data, particularly in inheritance +scenarios. Using the Context off of a Namespace one +can traverse an entire chain of templates that inherit from +one-another.

      +
      + +
      +
      +filename = None??
      +

      The path of the filesystem file used for this +Namespace‘s module or template.

      +

      If this is a pure module-based +Namespace, this evaluates to module.__file__. If a +template-based namespace, it evaluates to the original +template file location.

      +
      + +
      +
      +get_cached(key, **kwargs)??
      +

      Return a value from the Cache referenced by this +Namespace object’s Template.

      +

      The advantage to this method versus direct access to the +Cache is that the configuration parameters +declared in <%page> take effect here, thereby calling +up the same configured backend as that configured +by <%page>.

      +
      + +
      +
      +get_namespace(uri)??
      +

      Return a Namespace corresponding to the given uri.

      +

      If the given uri is a relative URI (i.e. it does not +contain a leading slash /), the uri is adjusted to +be relative to the uri of the namespace itself. This +method is therefore mostly useful off of the built-in +local namespace, described in local.

      +

In most cases, a template wouldn't need this function, and should instead use the <%namespace> tag to load namespaces. However, since all <%namespace> tags are evaluated before the body of a template ever runs, this method can be used to locate namespaces using expressions that were generated within the body code of the template, or to conditionally use a particular namespace.
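A small sketch of the conditional-lookup case (it assumes the template is served from a TemplateLookup that can resolve the components.html file used earlier in this chapter):

    <%
        # pick the namespace at runtime instead of via <%namespace>
        comp = local.get_namespace('components.html')
    %>
    ${comp.comp1()}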

      +
      + +
      +
      +get_template(uri)??
      +

      Return a Template from the given uri.

      +

      The uri resolution is relative to the uri of this Namespace +object’s Template.

      +
      + +
      +
      +include_file(uri, **kwargs)??
      +

      Include a file at the given uri.

      +
      + +
      +
      +module = None??
      +

      The Python module referenced by this Namespace.

      +

      If the namespace references a Template, then +this module is the equivalent of template.module, +i.e. the generated module for the template.

      +
      + +
      +
      +template = None??
      +

      The Template object referenced by this +Namespace, if any.

      +
      + +
      +
      +uri = None??
      +

      The URI for this Namespace‘s template.

      +

      I.e. whatever was sent to TemplateLookup.get_template().

      +

      This is the equivalent of Template.uri.

      +
      + +
      + +
      +
      +class mako.runtime.TemplateNamespace(name, context, template=None, templateuri=None, callables=None, inherits=None, populate_self=True, calling_uri=None)??
      +

      Bases: mako.runtime.Namespace

      +

      A Namespace specific to a Template instance.

      +
      +
      +filename??
      +

      The path of the filesystem file used for this +Namespace‘s module or template.

      +
      + +
      +
      +module??
      +

      The Python module referenced by this Namespace.

      +

      If the namespace references a Template, then +this module is the equivalent of template.module, +i.e. the generated module for the template.

      +
      + +
      +
      +uri??
      +

      The URI for this Namespace‘s template.

      +

      I.e. whatever was sent to TemplateLookup.get_template().

      +

      This is the equivalent of Template.uri.

      +
      + +
      + +
      +
      +class mako.runtime.ModuleNamespace(name, context, module, callables=None, inherits=None, populate_self=True, calling_uri=None)??
      +

      Bases: mako.runtime.Namespace

      +

      A Namespace specific to a Python module instance.

      +
      +
      +filename??
      +

      The path of the filesystem file used for this +Namespace‘s module or template.

      +
      + +
      + +
      +
      +mako.runtime.supports_caller(func)??
      +

      Apply a caller_stack compatibility decorator to a plain +Python function.

      +

      See the example in Namespaces from Regular Python Modules.

      +
      + +
      +
      +mako.runtime.capture(context, callable_, *args, **kwargs)??
      +

      Execute the given template def, capturing the output into +a buffer.

      +

      See the example in Namespaces from Regular Python Modules.

      +
      + +
      +
      + +
      + +
      + + + +
      + +
      + +
      + + + +
      +
diff --git a/lib3/Mako-0.7.3/doc/runtime.html b/lib3/Mako-0.7.3/doc/runtime.html
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/runtime.html
@@ -0,0 +1,710 @@

The Mako Runtime Environment

This section describes a little bit about the objects and built-in functions that are available in templates.

      +
      +

Context

The Context is the central object that is created when a template is first executed, and is responsible for handling all communication with the outside world. Within the template environment, it is available via the reserved name context. The Context includes two major components, one of which is the output buffer, which is a file-like object such as Python's StringIO or similar, and the other a dictionary of variables that can be freely referenced within a template; this dictionary is a combination of the arguments sent to the render() function and some built-in variables provided by Mako's runtime environment.
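For the common case you simply call render() and Mako builds the Context for you, but it can be instructive to supply the buffer and data yourself via render_context(); a minimal sketch:

    from io import StringIO
    from mako.runtime import Context
    from mako.template import Template

    buf = StringIO()
    t = Template("hello, ${name}!")
    # the Context couples the output buffer with the template's variables
    t.render_context(Context(buf, name="world"))
    print(buf.getvalue())    # hello, world!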

      +
      +

The Buffer

The buffer is stored within the Context, and writing to it is achieved by calling the write() method – in a template this looks like context.write('some string'). You usually don't need to care about this, as all text within a template, as well as all expressions provided by ${}, automatically send everything to this method. The cases where you might want to be aware of its existence are if you are dealing with various filtering/buffering scenarios, which are described in Filtering and Buffering, or if you want to programmatically send content to the output stream, such as within a <% %> block:

      +
      <%
      +    context.write("some programmatic text")
      +%>
      +
      +
      +

The actual buffer may or may not be the original buffer sent to the Context object, as various filtering/caching scenarios may "push" a new buffer onto the context's underlying buffer stack. For this reason, just stick with context.write() and content will always go to the topmost buffer.

      +
      +
      +

Context Variables

When your template is compiled into a Python module, the body content is enclosed within a Python function called render_body. Other top-level defs defined in the template are defined within their own function bodies which are named after the def's name with the prefix render_ (i.e. render_mydef). One of the first things that happens within these functions is that all variable names that are referenced within the function which are not defined in some other way (i.e. such as via assignment, module level imports, etc.) are pulled from the Context object's dictionary of variables. This is how you're able to freely reference variable names in a template which automatically correspond to what was passed into the current Context.
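If you want to see this for yourself, the generated module source is available on the Template object (a quick sketch; the exact generated code varies between Mako versions):

    from mako.template import Template

    t = Template("hello, ${name}!")
    # prints the generated Python module, including its render_body() function
    print(t.code)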

      +
• What happens if I reference a variable name that is not in the current context? - The value you get back is a special value called UNDEFINED, or, if the strict_undefined=True flag is used, a NameError is raised. UNDEFINED is just a simple global variable with the class mako.runtime.Undefined. The UNDEFINED object throws an error when you call str() on it, which is what happens if you try to use it in an expression.

• UNDEFINED makes it hard for me to find what name is missing - An alternative is to specify the option strict_undefined=True to the Template or TemplateLookup. This will cause any non-present variables to raise an immediate NameError which includes the name of the variable in its message when render() is called – UNDEFINED is not used.

  New in version 0.3.6.

• Why not just return None? - Using UNDEFINED, or raising a NameError, is more explicit and allows differentiation between a value of None that was explicitly passed to the Context and a value that wasn't present at all.

• Why raise an exception when you call str() on it? Why not just return a blank string? - Mako tries to stick to the Python philosophy of "explicit is better than implicit". In this case, it's decided that the template author should be made to specifically handle a missing value rather than experiencing what may be a silent failure. Since UNDEFINED is a singleton object just like Python's True or False, you can use the is operator to check for it:

    % if someval is UNDEFINED:
        someval is: no value
    % else:
        someval is: ${someval}
    % endif

Another facet of the Context is that its dictionary of variables is immutable. Whatever is set when render() is called is what stays. Of course, since it's Python, you can hack around this and change values in the context's internal dictionary, but this will probably not work as well as you'd think. The reason for this is that Mako in many cases creates copies of the Context object, which get sent to various elements of the template and inheriting templates used in an execution. So changing the value in your local Context will not necessarily make that value available in other parts of the template's execution. Examples of where Mako creates copies of the Context include within top-level def calls from the main body of the template (the context is used to propagate locally assigned variables into the scope of defs; since in the template's body they appear as inlined functions, Mako tries to make them act that way), and within an inheritance chain (each template in an inheritance chain has a different notion of parent and next, which are all stored in unique Context instances).

      +
• So what if I want to set values that are global to everyone within a template request? - All you have to do is provide a dictionary to your Context when the template first runs, and everyone can just get/set variables from that. Let's say it's called attributes.

  Running the template looks like:

      output = template.render(attributes={})

  Within a template, just reference the dictionary:

      <%
          attributes['foo'] = 'bar'
      %>
      'foo' attribute is: ${attributes['foo']}

• Why can't "attributes" be a built-in feature of the Context? - This is an area where Mako is trying to make as few decisions about your application as it possibly can. Perhaps you don't want your templates to use this technique of assigning and sharing data, or perhaps you have a different notion of the names and kinds of data structures that should be passed around. Once again, Mako would rather ask the user to be explicit.
      +
      +

Context Methods and Accessors

Significant members of Context include:

      +
        +
• context[key] / context.get(key, default=None) - dictionary-like accessors for the context. Normally, any variable you use in your template is automatically pulled from the context if it isn't defined somewhere already. Use the dictionary accessor and/or get() method when you want a variable that is already defined somewhere else, such as in the local arguments sent to a %def call. If a key is not present, it raises KeyError just like a dictionary (see the short sketch after this list).

• keys() - all the names defined within this context.

• kwargs - this returns a copy of the context's dictionary of variables. This is useful when you want to propagate the variables in the current context to a function as keyword arguments, i.e.:

      ${next.body(**context.kwargs)}

• write(text) - write some text to the current output stream.

• lookup - returns the TemplateLookup instance that is used for all file-lookups within the current execution (even though individual Template instances can conceivably have different instances of a TemplateLookup, only the TemplateLookup of the originally-called Template gets used in a particular execution).
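As referenced in the first item of the list above, a small sketch of the dictionary-style accessors (someflag is an assumed context variable that may or may not have been passed to render()):

    % if context.get('someflag', False):
        the flag was passed and is true
    % endif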

        +
      • +
      +
      +
      +
      +

The Loop Context

Within % for blocks, the reserved name loop is available. loop tracks the progress of the for loop and makes it easy to use the iteration state to control template behavior:

      +
      <ul>
      +% for a in ("one", "two", "three"):
      +    <li>Item ${loop.index}: ${a}</li>
      +% endfor
      +</ul>
      +
      +
      +

New in version 0.7.

      +
      +

Iterations

Regardless of the type of iterable you're looping over, loop always tracks the 0-indexed iteration count (available at loop.index), its parity (through the loop.even and loop.odd bools), and loop.first, a bool indicating whether the loop is on its first iteration. If your iterable provides a __len__ method, loop also provides access to a count of iterations remaining at loop.reverse_index and loop.last, a bool indicating whether the loop is on its last iteration; accessing these without __len__ will raise a TypeError.
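For example, loop.first and loop.last make it easy to special-case the endpoints of a sequence (a small sketch; a tuple provides __len__, so loop.last is available):

    <ul>
    % for item in ("one", "two", "three"):
        % if loop.first:
        <li class="first">${item}</li>
        % elif loop.last:
        <li class="last">${item}</li>
        % else:
        <li>${item}</li>
        % endif
    % endfor
    </ul>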

      +
      +
      +

Cycling

Cycling is available regardless of whether the iterable you're using provides a __len__ method. Prior to Mako 0.7, you might have generated a simple zebra-striped list using enumerate:

      +
      <ul>
      +% for i, item in enumerate(('spam', 'ham', 'eggs')):
      +  <li class="${'odd' if i % 2 else 'even'}">${item}</li>
      +% endfor
      +</ul>
      +
      +
      +

With loop.cycle, you get the same results with cleaner code and less prep work:

      +
      <ul>
      +% for item in ('spam', 'ham', 'eggs'):
      +  <li class="${loop.cycle('even', 'odd')}">${item}</li>
      +% endfor
      +</ul>
      +
      +
      +

      Both approaches produce output like the following:

      +
      <ul>
      +  <li class="even">spam</li>
      +  <li class="odd">ham</li>
      +  <li class="even">eggs</li>
      +</ul>
      +
      +
      +
      +
      +

Parent Loops

Loop contexts can also be transparently nested, and the Mako runtime will do the right thing and manage the scope for you. You can access the parent loop context through loop.parent.

This allows you to reach all the way back up through the loop stack by chaining parent attribute accesses, i.e. loop.parent.parent..., as long as the stack depth isn't exceeded. For example, you can use the parent loop to make a checkered table:

      +
      <table>
      +% for consonant in 'pbj':
      +  <tr>
      +  % for vowel in 'iou':
      +    <td class="${'black' if (loop.parent.even == loop.even) else 'red'}">
      +      ${consonant + vowel}t
      +    </td>
      +  % endfor
      +  </tr>
      +% endfor
      +</table>
      +
      +
      +
      <table>
      +  <tr>
      +    <td class="black">
      +      pit
      +    </td>
      +    <td class="red">
      +      pot
      +    </td>
      +    <td class="black">
      +      put
      +    </td>
      +  </tr>
      +  <tr>
      +    <td class="red">
      +      bit
      +    </td>
      +    <td class="black">
      +      bot
      +    </td>
      +    <td class="red">
      +      but
      +    </td>
      +  </tr>
      +  <tr>
      +    <td class="black">
      +      jit
      +    </td>
      +    <td class="red">
      +      jot
      +    </td>
      +    <td class="black">
      +      jut
      +    </td>
      +  </tr>
      +</table>
      +
      +
      +
      +
      +

Migrating Legacy Templates that Use the Word "loop"

Changed in version 0.7: the loop name is now reserved in Mako, which means a template that refers to a variable named loop won't function correctly when used in Mako 0.7.

      +

To ease the transition for such systems, the feature can be disabled across the board for all templates, then re-enabled on a per-template basis for those templates which wish to make use of the new system.

      +

First, the enable_loop=False flag is passed to either the TemplateLookup or Template object in use:

      +
      lookup = TemplateLookup(directories=['/docs'], enable_loop=False)
      +
      +
      +

      or:

      +
      template = Template("some template", enable_loop=False)
      +
      +
      +

An individual template can make use of the feature when enable_loop is set to False by switching it back on within the <%page> tag:

      +
      <%page enable_loop="True"/>
      +
      +% for i in collection:
      +    ${i} ${loop.index}
      +% endfor
      +
      +
      +

Using the above scheme, it's safe to pass the name loop to the Template.render() method as well as to freely make use of a variable named loop within a template, provided the <%page> tag doesn't override it. New templates that want to use the loop context can then set up <%page enable_loop="True"/> to use the new feature without affecting old templates.

      +
      +
      +
      +

All the Built-in Names

A one-stop shop for all the names Mako defines. Most of these names are instances of Namespace, which are described in the next section, Namespaces. Also, most of these names other than context, UNDEFINED, and loop are also present within the Context itself. The names context, loop and UNDEFINED themselves can't be passed to the context and can't be substituted – see the section Reserved Names.

      +
        +
• context - this is the Context object, introduced at Context.

• local - the namespace of the current template, described in Built-in Namespaces.

• self - the namespace of the topmost template in an inheritance chain (if any, otherwise the same as local), mostly described in Inheritance.

• parent - the namespace of the parent template in an inheritance chain (otherwise undefined); see Inheritance.

• next - the namespace of the next template in an inheritance chain (otherwise undefined); see Inheritance.

• caller - a "mini" namespace created when using the <%call> tag to define a "def call with content"; described in Calling a Def with Embedded Content and/or Other Defs.

• loop - this provides access to LoopContext objects when they are requested within % for loops, introduced at The Loop Context.

• capture - a function that calls a given def and captures its resulting content into a string, which is returned. Usage is described in Filtering and Buffering.

• UNDEFINED - a global singleton that is applied to all otherwise uninitialized template variables that were not located within the Context when rendering began, unless the Template flag strict_undefined is set to True. UNDEFINED is an instance of Undefined, and raises an exception when its __str__() method is called.

• pageargs - this is a dictionary which is present in a template which does not define any **kwargs section in its <%page> tag. All keyword arguments sent to the body() function of a template (when used via namespaces) go here by default unless otherwise defined as a page argument. If this makes no sense, it shouldn't; read the section The body() Method.
      +
      +

Reserved Names

Mako has a few names that are considered to be "reserved" and can't be used as variable names.

Changed in version 0.7: Mako raises an error if these words are found passed to the template as context arguments, whereas in previous versions they'd be silently ignored or lead to other error messages.

      + +
      +
      +
      +

API Reference

class mako.runtime.Context(buffer, **data)
      +

      Bases: object

      +

      Provides runtime namespace, output buffer, and various +callstacks for templates.

      +

      See The Mako Runtime Environment for detail on the usage of +Context.

      +
      +
      +get(key, default=None)??
      +

      Return a value from this Context.

      +
      + +
      +
      +keys()??
      +

      Return a list of all names established in this Context.

      +
      + +
      +
      +kwargs??
      +

      Return the dictionary of keyword arguments associated with this +Context.

      +
      + +
      +
      +locals_(d)??
      +

      Create a new Context with a copy of this +Context‘s current state, updated with the given dictionary.

      +
      + +
      +
      +lookup??
      +

      Return the TemplateLookup associated +with this Context.

      +
      + +
      +
      +pop_caller()??
      +

Pop the most recently pushed caller callable off the callstack for this Context.

      +
      + +
      +
      +push_caller(caller)??
      +

      Push a caller callable onto the callstack for +this Context.

      +
      + +
      +
      +write(string)??
      +

      Write a string to this Context object’s +underlying output buffer.

      +
      + +
      +
      +writer()??
      +

      Return the current writer function.

      +
      + +
      + +
      +
class mako.runtime.LoopContext(iterable)

Bases: object

A magic loop variable. Automatically accessible in any % for block.

See the section The Loop Context for usage notes.

      +
      +
      parent -> LoopContext or None
      +
      The parent loop, if one exists.
      +
      index -> int
      +
      The 0-based iteration count.
      +
      reverse_index -> int
      +
      The number of iterations remaining.
      +
      first -> bool
      +
      True on the first iteration, False otherwise.
      +
      last -> bool
      +
      True on the last iteration, False otherwise.
      +
      even -> bool
      +
      True when index is even.
      +
      odd -> bool
      +
      True when index is odd.
      +
      +
      +
      +cycle(*values)??
      +

      Cycle through values as the loop progresses.

      +
      + +
      + +
      +
class mako.runtime.Undefined

Bases: object

Represents an undefined value in a template.

All template modules have a constant value UNDEFINED present which is an instance of this object.

      +
      + +
      +
      + +
      + +
      + + + +
      + +
      + +
      + + + +
      +
diff --git a/lib3/Mako-0.7.3/doc/search.html b/lib3/Mako-0.7.3/doc/search.html
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/search.html
@@ -0,0 +1,162 @@
(generated search page markup omitted)

diff --git a/lib3/Mako-0.7.3/doc/searchindex.js b/lib3/Mako-0.7.3/doc/searchindex.js
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/doc/searchindex.js
@@ -0,0 +1,1 @@
(generated search index data omitted)
8,filesystem:[8,6,3],reach:[8,9],disgard:7,most:[0,7,4,5,6,3,8,9],plai:[3,7],jsp:5,ext:[8,6],fastencodingbuff:7,wsgi:8,particularli:3,find:[8,9,5],mydef:0,coerc:7,pretti:[7,5],writer:9,hit:[2,7],"__file__":3,express:[0,1,2,3,4,5,7,8,9],nativ:7,common:[8,1,7],set:[2,7,5,6,3,8,9],genshi:5,dump:[3,7],see:[0,7,5,6,3,8,9],dumb:7,arg:[0,2,5,6,3,8],reserv:9,whatsoev:7,someth:[0,2,7],topmost:[5,9,3,4],won:[8,9,4],altern:[8,9,7],signatur:[0,3],syntact:5,numer:5,popul:6,both:[0,2,4,7,8,9],last:[8,9],put_templ:8,alor:7,context:[0,1,2,3,5,6,7,8,9],whole:[0,2,3,4],load:[8,3,5],simpli:[2,4],bell:7,arbitrarili:0,header:[0,8,5,6,4],uniniti:9,param:5,suppli:[3,5],frobnizzl:5,throughout:3,backend:[6,3],empti:[2,7],accessor:[6,9,3,4],strategi:6,error_handl:8,imag:[8,7],great:[3,7],understand:7,func:3,xa9:7,look:[0,7,4,6,3,8,9],get_or_cr:6,straight:[8,7],histor:6,"while":[0,8,5,2,4],abov:[0,2,3,4,5,6,7,8,9],error:[0,8,9,7],anonym:[0,6,5],everyon:9,loop:[0,1,8,9,5],pylon:8,propag:[8,9,5],richtraceback:8,vou:7,itself:[0,2,3,4,5,6,7,8,9],decor:[1,2,3],minim:6,decod:[2,7],conflict:6,x80:7,wherea:[0,9,7,5],has_templ:8,stripe:9,pop_buff:2,typic:[8,6],recent:8,travers:3,task:6,older:0,cachemanag:6,entri:[8,6],somev:[9,3],elem:5,picki:7,endfor:[0,9,5],construct:[0,7,4,5,6,3,8],burden:7,sidebar:0,adjust_uri:8,msgid:8,theorem:5,input:[0,2,7],subsequ:6,format:[0,8,3,7,5],game:7,bit:[0,8,9],characterist:5,creation_funct:6,semi:6,whitespac:[0,2,5],resolv:8,collect:[8,6,9,3,7],"boolean":8,popular:8,encount:4,often:3,creation:[0,8,2,6],some:[0,2,3,4,5,6,7,8,9],back:[8,9,7,4],global:[0,6,9,3,4],understood:6,sampl:8,mirror:8,surpris:7,modulepath:6,though:[6,9,7,4],pep:7,per:[6,9,7,5],namespace_nam:3,substitut:[8,1,2,9,5],larg:4,slash:[3,5],leftmost:2,cgi:[2,7],buffer_filt:8,previou:[0,8,9,4],run:[8,9,3,7],namespacenam:[0,5],reverse_traceback:8,step:[0,8,7],loopcontext:9,from:[0,1,2,3,4,5,6,7,8,9],mynamespac:[3,5],exc_info:8,block:[0,1,2,4,5,6,9],within:[0,2,3,4,5,6,7,8,9],toplevelnotfound:8,ensur:[6,3,7],chang:[6,9,4],run_wsgi:8,span:[0,4],reverse_index:9,spam:9,bodi:[0,1,4,5,6,3,8,9],stylesheet:8,"long":[9,5],beaker_cach:6,includ:[0,5,6,3,8,9],suit:4,myfunc:5,properli:7,templatelookupexcept:8,link:8,translat:8,newer:[0,8],atom:8,line:[8,5,7,4],info:8,concaten:2,utf:[8,2,7],consist:5,caller:[0,2,3,9],my_tag:3,myescap:2,similar:[0,2,3,4,5,7,8,9],impl:6,constant:9,doesn:[0,9,3,4],repres:[0,5,6,7,8,9],modulename_cal:8,titl:[0,5,4],invalid:6,codec:[8,7],accountdata:0,draw:2,clean:[8,6],nightmar:7,bytestring_passthrough:8,xb4le:7,cache_typ:[8,6,5],depth:9,far:[8,5,7,4],hello:[0,8,3,7,5],code:[0,2,7,5,3,8,9],templatetext:[2,7],send:[2,7,4,3,8,9],sens:9,sent:[2,9,3,5],tri:[0,8,9],magic:[9,7],"try":[8,2,9,7,5],dealt:8,pleas:5,impli:8,cfg:8,odd:9,append:8,compat:[8,6,3],index:[1,4,5,3,8,9],compar:[8,7],xa9veil:7,access:[0,1,4,5,6,3,8,9],can:[0,2,3,4,5,6,7,8,9],len:5,closur:0,let:[0,8,9,4],becom:[8,3,7],sinc:[0,9,3,7,4],filesystem_check:8,convert:[8,6,7],convers:7,conceiv:9,ctx:8,implement:[0,8,6,4],appli:[0,2,7,5,3,8,9],approxim:8,mystuff:0,api:[1,2,6,3,8,9],immut:9,register_plugin:6,metaphor:4,commun:[0,9],next:[0,1,2,3,4,5,6,7,8,9],implic:7,few:9,trail:2,beakercacheimpl:6,account:0,retriev:6,augment:[0,1,4],obvious:4,control:[0,1,2,4,5,8,9],accountnam:0,process:[6,7,5],lock:6,slim:6,tag:[0,1,2,4,5,6,3,8,9],layoutdata:0,nari:7,instead:[0,2,7,4,3,8],templatecollect:8,overridden:[0,6,3,4],class_:4,tack:3,philosophi:9,callable_:[8,3],essenti:[6,3,7],correspond:[8,6,9,3,4],element:[0,8,9,7,5],issu:[0,8,7],allow:[0,2,3,4,5,6,7,8,9],elif:5,
move:8,comma:[0,2],bunch:3,outer:0,chosen:6,myproj:8,bye:0,handl:[0,1,5,6,7,8,9],handi:3,"r\u00e9veill\u00e9":7,relativeto:8,somewher:[8,9,7],anyth:[0,7,4],nameerror:[8,9],mode:7,disregard:8,pygment:8,intellig:7,filehandl:7,our:[0,8,6,4],special:[8,6,2,9,5],out:[0,4,5,6,7,8],variabl:[0,4,5,3,8,9],contigu:5,categori:3,rel:[8,3],red:9,insid:[0,4,5,6,3,8],call_my_object:7,standalon:8,dictionari:[8,6,9,3],releas:[6,3,5],indent:5,xc3:7,could:4,lexer:[8,7,5],keep:0,outsid:[0,9,3,7,4],strict:8,system:[0,8,9,6,4],messag:[8,9],attach:3,"final":[2,7,4],cache_dir:[8,6],accompani:8,exactli:[0,4],filename_to_uri:8,structur:[1,9,5],charact:[8,7,5],simplecacheimpl:6,have:[0,2,3,4,5,6,7,8,9],tabl:[0,1,9],need:[0,7,4,6,3,8,9],turn:[2,7,4],babel:8,outward:8,builtin:7,best:5,which:[0,2,3,4,5,6,7,8,9],singl:[0,2,4,5,6,7,8],unless:[9,7],who:7,segment:7,"class":[0,7,4,6,3,8,9],url:[8,6,2,5],gather:7,request:[8,6,9],uri:[8,3,5],pipe:2,determin:[6,7],"_cach":6,fact:[0,7,4],render_context:8,dbm:6,text:[0,2,5,6,7,8,9],cache_timeout:[0,6],anywai:8,locat:[8,9,3,7],should:[8,6,9,3],suppos:[0,6],local:[0,2,5,6,3,8,9],meant:4,familiar:[0,5],bean:8,cache_:6,increas:8,cstringio:[8,7],enabl:[8,6,9,5],organ:[3,4],current_sect:5,stuff:[3,5],integr:[8,1,4],contain:[0,7,5,6,3,8],view:8,reverse_record:8,legaci:[8,9],collection_s:8,pinard:8,flip:0,bytestr:8,mako_modul:8,polymorph:5,correctli:9,pattern:8,written:[8,5,4],progress:9,neither:6,email:5,jot:9,kei:[8,6,9,3,7],module_directori:[8,6],tempfil:8,job:2,entir:[0,1,2,3,5,7,8],joe:5,cache_arg:[8,6],addit:[0,2,4,5,6,3,8],plugin:[6,1],etc:[0,8,9,6,5],instanc:[8,9,3,5],freeli:9,comment:[8,1,7,5],guidelin:6,mako_cach:6,respect:5,addition:[8,7,4],compon:[6,9,3],include_fil:3,treat:7,immedi:[8,5,9,7,4],upcom:6,togeth:4,present:[0,7,4,6,3,8,9],determinist:2,therefor:[8,5,3,4],plain:[0,8,3,7,5],contextu:0,defin:[0,1,2,3,4,5,6,7,8,9],helper:8,almost:[5,4],incom:7,revis:8,parti:6,began:9,member:[8,9,5],python:[0,1,2,3,4,5,7,8,9],denot:5,iou:9,upon:[0,8,2,6,4],effect:[0,6,2,3],distutil:8,markupsaf:[2,7],off:[0,6,2,3,4],mention:7,well:[0,2,3,4,5,6,7,8,9],exampl:[0,2,4,5,6,3,8,9],command:8,choos:7,undefin:[0,8,9,5],usual:[8,6,2,9,5],module_writ:8,less:[9,7,4],heavili:7,web:8,point:[0,7,4,5,6,3,8],add:[0,8,2,4],lookup:[8,6,9,7],dest:8,arguabl:7,cache_impl:[8,6],five:[6,5],know:[7,4],xe2:7,mkstemp:8,insert:4,like:[0,2,3,4,5,6,7,8,9],success:8,page:[0,1,2,4,5,6,3,8,9],unreach:0,exceed:9,revers:8,captur:[8,2,3,9],pariti:9,"export":[0,5,4],smoothli:4,proper:8,librari:[0,8,2,6,7],tmp:[8,2],lead:[2,3,9],usag:[0,1,2,3,4,6,7,8,9],nutshel:4,although:5,stage:8,beaker:[8,6],about:[8,1,5,9,4],actual:[0,2,4,5,6,3,8,9],column:0,htmlentitydef:2,discard:2,x99a:7,disabl:[1,2,6,7,8,9],own:[0,2,3,4,5,6,7,8,9],populate_self:3,automat:[0,2,4,5,6,8,9],"dr\u00f4le":7,leverag:5,quote_plu:2,inner:0,arg1:3,arg2:3,"function":[0,2,3,4,5,6,7,8,9],keyerror:9,invalidate_def:6,eas:[2,9],inlin:[5,9,4],buf:[8,2],wherev:7,count:[0,8,9],made:[6,9],whether:[2,9,5],wish:[6,2,9],displai:8,record:[8,3,5],below:[0,7,4],otherwis:[8,9,7,5],evalu:[0,3,5],"int":9,dure:[8,4],filenam:[8,6,3],twist:0,pit:9,probabl:[0,8,9,6],mutual:8,percent:5,detail:[0,4,5,7,8,9],other:[0,2,3,4,5,6,7,8,9],bool:9,futur:[3,5],varieti:[6,5],post_pros:0,supports_cal:3,templateuri:3,some_condit:0,stai:[6,9],experienc:9,strict_undefin:[8,9],rule:[0,7,4],portion:0},objtypes:{"0":"py:attribute","1":"py:method","2":"py:class","3":"py:function"},titles:["Defs and Blocks","Table of Contents","Filtering and 
Buffering","Namespaces","Inheritance","Syntax","Caching","The Unicode Chapter","Usage","The Mako Runtime Environment"],objnames:{"0":["py","attribute","Python attribute"],"1":["py","method","Python method"],"2":["py","class","Python class"],"3":["py","function","Python function"]},filenames:["defs","index","filtering","namespaces","inheritance","syntax","caching","unicode","usage","runtime"]}) \ No newline at end of file diff --git a/lib3/Mako-0.7.3/doc/syntax.html b/lib3/Mako-0.7.3/doc/syntax.html new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/syntax.html @@ -0,0 +1,596 @@ + + + + + + + Syntax + — + Mako 0.7.3 Documentation + + + + + + + + + + + + + + + + + + + + +
      +
      + + +
      + Hyperfast and lightweight templating for the Python platform. +
      + + +
      + + + +
      + + + + + + + + + + + +
      + + + +
      +

      Mako 0.7.3 Documentation

      + + + +
      + Release: 0.7.3 + +
      + +
      + +
      + + +
+ Mako 0.7.3 Documentation » Syntax

      + + Syntax + +

      +
      + +
      + +
      + + + +
      + +
      +

Syntax

      +

      A Mako template is parsed from a text stream containing any kind +of content, XML, HTML, email text, etc. The template can further +contain Mako-specific directives which represent variable and/or +expression substitutions, control structures (i.e. conditionals +and loops), server-side comments, full blocks of Python code, as +well as various tags that offer additional functionality. All of +these constructs compile into real Python code. This means that +you can leverage the full power of Python in almost every aspect +of a Mako template.

      +
      +

Expression Substitution

      +

      The simplest expression is just a variable substitution. The +syntax for this is the ${} construct, which is inspired by +Perl, Genshi, JSP EL, and others:

      +
      this is x: ${x}
      +
      +
      +

      Above, the string representation of x is applied to the +template’s output stream. If you’re wondering where x comes +from, it’s usually from the Context supplied to the +template’s rendering function. If x was not supplied to the +template and was not otherwise assigned locally, it evaluates to +a special value UNDEFINED. More on that later.
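As a quick, hedged illustration (the variable name y is hypothetical, not part of the original docs), a template can test for that sentinel itself rather than rendering it:
% if y is UNDEFINED:
    no value for y was supplied
% else:
    y is: ${y}
% endif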

      +

      The contents within the ${} tag are evaluated by Python +directly, so full expressions are OK:

      +
      pythagorean theorem:  ${pow(x,2) + pow(y,2)}
      +
      +
      +

The result of the expression is converted into a string in all cases before being rendered to the output stream, as in the above example where the expression produces a numeric result.

      +
      +
      +

Expression Escaping

      +

      Mako includes a number of built-in escaping mechanisms, +including HTML, URI and XML escaping, as well as a “trim” +function. These escapes can be added to an expression +substitution using the | operator:

      +
      ${"this is some text" | u}
      +
      +
      +

      The above expression applies URL escaping to the expression, and +produces this+is+some+text. The u name indicates URL +escaping, whereas h represents HTML escaping, x +represents XML escaping, and trim applies a trim function.
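Filters can also be chained with commas and are applied left to right; a minimal sketch (the sample text is illustrative only), which HTML-escapes the markup and then trims the surrounding whitespace:
${"  <b>some bold text</b>  " | h, trim}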

      +

      Read more about built-in filtering functions, including how to +make your own filter functions, in Filtering and Buffering.

      +
      +
      +

Control Structures

      +

      A control structure refers to all those things that control the +flow of a program – conditionals (i.e. if/else), loops (like +while and for), as well as things like try/except. In Mako, +control structures are written using the % marker followed +by a regular Python control expression, and are “closed” by +using another % marker with the tag “end<name>”, where +“<name>” is the keyword of the expression:

      +
      % if x==5:
      +    this is some output
      +% endif
      +
      +
      +

The % can appear anywhere on the line as long as no text precedes it; indentation is not significant. The full range of Python “colon” expressions is allowed here, including if/elif/else, while, for, and even def, although Mako has a built-in tag for defs which is more full-featured.

      +
      % for a in ['one', 'two', 'three', 'four', 'five']:
      +    % if a[0] == 't':
      +    its two or three
      +    % elif a[0] == 'f':
      +    four/five
      +    % else:
      +    one
      +    % endif
      +% endfor
      +
      +
      +

      The % sign can also be “escaped”, if you actually want to +emit a percent sign as the first non whitespace character on a +line, by escaping it as in %%:

      +
      %% some text
      +
      +    %% some more text
      +
      +
      +
      +

The Loop Context

      +

      The loop context provides additional information about a loop +while inside of a % for structure:

      +
      <ul>
      +% for a in ("one", "two", "three"):
      +    <li>Item ${loop.index}: ${a}</li>
      +% endfor
      +</ul>
      +
      +
      +

      See The Loop Context for more information on this feature.

      +

      +New in version 0.7.
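Beyond loop.index, the loop context also exposes attributes such as first, last, even, odd, reverse_index, and a cycle() helper; a brief sketch:
<ul>
% for a in ("one", "two", "three"):
    <li class="${loop.cycle('even', 'odd')}">Item ${loop.index}: ${a}</li>
% endfor
</ul>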

      +
      +
      +
      +

Comments

      +

      Comments come in two varieties. The single line comment uses +## as the first non-space characters on a line:

      +
      ## this is a comment.
      +...text ...
      +
      +
      +

      A multiline version exists using <%doc> ...text... </%doc>:

      +
      <%doc>
      +    these are comments
      +    more comments
      +</%doc>
      +
      +
      +
      +
      +

Newline Filters

      +

      The backslash (“\”) character, placed at the end of any +line, will consume the newline character before continuing to +the next line:

      +
      here is a line that goes onto \
      +another line.
      +
      +
      +

      The above text evaluates to:

      +
      here is a line that goes onto another line.
      +
      +
      +
      +
      +

Python Blocks

      +

Any arbitrary block of Python can be dropped in using the <% %> tags:

      +
      this is a template
      +<%
      +    x = db.get_resource('foo')
      +    y = [z.element for z in x if x.frobnizzle==5]
      +%>
      +% for elem in y:
      +    element: ${elem}
      +% endfor
      +
      +
      +

      Within <% %>, you’re writing a regular block of Python code. +While the code can appear with an arbitrary level of preceding +whitespace, it has to be consistently formatted with itself. +Mako’s compiler will adjust the block of Python to be consistent +with the surrounding generated Python code.

      +
      +
      +

Module-level Blocks

      +

      A variant on <% %> is the module-level code block, denoted +by <%! %>. Code within these tags is executed at the module +level of the template, and not within the rendering function of +the template. Therefore, this code does not have access to the +template’s context and is only executed when the template is +loaded into memory (which can be only once per application, or +more, depending on the runtime environment). Use the <%! %> +tags to declare your template’s imports, as well as any +pure-Python functions you might want to declare:

      +
      <%!
      +    import mylib
      +    import re
      +
      +    def filter(text):
      +        return re.sub(r'^@', '', text)
      +%>
      +
      +
      +

      Any number of <%! %> blocks can be declared anywhere in a +template; they will be rendered in the resulting module +in a single contiguous block above all render callables, +in the order in which they appear in the source template.

      +
      +
      +

Tags

      +

      The rest of what Mako offers takes place in the form of tags. +All tags use the same syntax, which is similar to an XML tag +except that the first character of the tag name is a % +character. The tag is closed either by a contained slash +character, or an explicit closing tag:

      +
      <%include file="foo.txt"/>
      +
      +<%def name="foo" buffered="True">
      +    this is a def
      +</%def>
      +
      +
      +

      All tags have a set of attributes which are defined for each +tag. Some of these attributes are required. Also, many +attributes support evaluation, meaning you can embed an +expression (using ${}) inside the attribute text:

      +
      <%include file="/foo/bar/${myfile}.txt"/>
      +
      +
      +

      Whether or not an attribute accepts runtime evaluation depends +on the type of tag and how that tag is compiled into the +template. The best way to find out if you can stick an +expression in is to try it! The lexer will tell you if it’s not +valid.

      +

Here's a quick summary of all the tags:

      +
      +

<%page>

      +

      This tag defines general characteristics of the template, +including caching arguments, and optional lists of arguments +which the template expects when invoked.

      +
      <%page args="x, y, z='default'"/>
      +
      +
      +

      Or a page tag that defines caching characteristics:

      +
      <%page cached="True" cache_type="memory"/>
      +
      +
      +

      Currently, only one <%page> tag gets used per template, the +rest get ignored. While this will be improved in a future +release, for now make sure you have only one <%page> tag +defined in your template, else you may not get the results you +want. The details of what <%page> is used for are described +further in The body() Method as well as Caching.

      +
      +
      +

<%include>

      +

      A tag that is familiar from other template languages, %include +is a regular joe that just accepts a file argument and calls in +the rendered result of that file:

      +
      <%include file="header.html"/>
      +
      +    hello world
      +
      +<%include file="footer.html"/>
      +
      +
      +

      Include also accepts arguments which are available as <%page> arguments in the receiving template:

      +
      <%include file="toolbar.html" args="current_section='members', username='ed'"/>
      +
      +
      +
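The receiving template (a hypothetical toolbar.html here) picks those arguments up through its own <%page> tag, roughly like this:
## toolbar.html
<%page args="current_section, username='guest'"/>

<div class="toolbar">
    ${username} is viewing the ${current_section} section
</div>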
      +
      +

<%def>

      +

The %def tag defines a Python function which contains a set of content that can be called at some other point in the template. The basic idea is simple:

      +
      <%def name="myfunc(x)">
      +    this is myfunc, x is ${x}
      +</%def>
      +
      +${myfunc(7)}
      +
      +
      +

      The %def tag is a lot more powerful than a plain Python def, as +the Mako compiler provides many extra services with %def that +you wouldn’t normally have, such as the ability to export defs +as template “methods”, automatic propagation of the current +Context, buffering/filtering/caching flags, and def calls +with content, which enable packages of defs to be sent as +arguments to other def calls (not as hard as it sounds). Get the +full deal on what %def can do in Defs and Blocks.

      +
      +
      +

<%block>

      +

%block is a tag that is close to a %def, except that it executes itself immediately in its base-most scope, and can also be anonymous (i.e. with no name):

      +
      <%block filter="h">
      +    some <html> stuff.
      +</%block>
      +
      +
      +

      Inspired by Jinja2 blocks, named blocks offer a syntactically pleasing way +to do inheritance:

      +
      <html>
      +    <body>
      +    <%block name="header">
      +        <h2><%block name="title"/></h2>
      +    </%block>
      +    ${self.body()}
      +    </body>
      +</html>
      +
      +
      +

      Blocks are introduced in Using Blocks and further described in Inheritance.

      +

      +New in version 0.4.1.

      +
      +
      +

<%namespace>

      +

      %namespace is Mako’s equivalent of Python’s import +statement. It allows access to all the rendering functions and +metadata of other template files, plain Python modules, as well +as locally defined “packages” of functions.

      +
      <%namespace file="functions.html" import="*"/>
      +
      +
      +

      The underlying object generated by %namespace, an instance of +mako.runtime.Namespace, is a central construct used in +templates to reference template-specific information such as the +current URI, inheritance structures, and other things that are +not as hard as they sound right here. Namespaces are described +in Namespaces.
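When the namespace is given a name rather than import="*", its defs are invoked as attributes of that name; a minimal sketch, assuming functions.html defines a def called somedef (both names are illustrative):
<%namespace name="fns" file="functions.html"/>

${fns.somedef(x=5)}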

      +
      +
      +

<%inherit>

      +

      Inherit allows templates to arrange themselves in inheritance +chains. This is a concept familiar in many other template +languages.

      +
      <%inherit file="base.html"/>
      +
      +
      +

      When using the %inherit tag, control is passed to the topmost +inherited template first, which then decides how to handle +calling areas of content from its inheriting templates. Mako +offers a lot of flexibility in this area, including dynamic +inheritance, content wrapping, and polymorphic method calls. +Check it out in Inheritance.
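A minimal sketch of the two files involved (base.html and index.html are hypothetical names): the inherited template renders the page skeleton and calls ${self.body()} to pull in the inheriting template's content:
## base.html
<html>
    <body>
        <div class="header">the header</div>
        ${self.body()}
        <div class="footer">the footer</div>
    </body>
</html>

## index.html
<%inherit file="base.html"/>

this is the body content of index.html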

      +
      +
      +

<%nsname:defname>

      +

      Any user-defined “tag” can be created against +a namespace by using a tag with a name of the form +<%<namespacename>:<defname>>. The closed and open formats of such a +tag are equivalent to an inline expression and the <%call> +tag, respectively.

      +
      <%mynamespace:somedef param="some value">
      +    this is the body
      +</%mynamespace:somedef>
      +
      +
      +

      To create custom tags which accept a body, see +Calling a Def with Embedded Content and/or Other Defs.

      +

      +New in version 0.2.3.

      +
      +
      +

<%call>

      +

      The call tag is the “classic” form of a user-defined tag, and is +roughly equivalent to the <%namespacename:defname> syntax +described above. This tag is also described in Calling a Def with Embedded Content and/or Other Defs.
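For reference, a hedged sketch of the classic form corresponding to the <%mynamespace:somedef> example above, using the expr attribute:
<%call expr="mynamespace.somedef(param='some value')">
    this is the body
</%call>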

      +
      +
      +

<%doc>

      +

      The %doc tag handles multiline comments:

      +
      <%doc>
      +    these are comments
      +    more comments
      +</%doc>
      +
      +
      +

Also, the ## symbol as the first non-space characters on a line can be used for single-line comments.

      +
      +
      +

<%text>

      +

      This tag suspends the Mako lexer’s normal parsing of Mako +template directives, and returns its entire body contents as +plain text. It is used pretty much to write documentation about +Mako:

      +
      <%text filter="h">
      +    heres some fake mako ${syntax}
      +    <%def name="x()">${x}</%def>
      +</%text>
      +
      +
      +
      +
      +
      +

Returning Early from a Template

      +

      Sometimes you want to stop processing a template or <%def> +method in the middle and just use the text you’ve accumulated so +far. You can use a return statement inside a Python +block to do that.

      +
      % if not len(records):
      +    No records found.
      +    <% return %>
      +% endif
      +
      +
      +

      Or perhaps:

      +
      <%
      +    if not len(records):
      +        return
      +%>
      +
      +
      +
      +
      + +
      + +
      + + + +
      + +
      + +
      + + + +
      +
      + + diff --git a/lib3/Mako-0.7.3/doc/unicode.html b/lib3/Mako-0.7.3/doc/unicode.html new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/unicode.html @@ -0,0 +1,476 @@ + + + + + + + The Unicode Chapter + — + Mako 0.7.3 Documentation + + + + + + + + + + + + + + + + + + + + +
      +
      + + +
      + Hyperfast and lightweight templating for the Python platform. +
      + + +
      + + + +
      + + + + + + + + + + + +
      + + + +
      +

      Mako 0.7.3 Documentation

      + + + +
      + Release: 0.7.3 + +
      + +
      + +
      + + +
+ Mako 0.7.3 Documentation » The Unicode Chapter

      + + The Unicode Chapter + +

      +
      + +
      + +
      + + + +
      + +
      +

The Unicode Chapter

      +

The Python language supports two ways of representing what we know as “strings”, i.e. series of characters. In Python 2, the two types are string and unicode, and in Python 3 they are bytes and string. A key aspect of the Python 2 string and Python 3 bytes types is that they contain no information regarding what encoding the data is stored in. For this reason they were commonly referred to as byte strings on Python 2, and Python 3 makes this name more explicit. The origins of this come from Python's background of being developed before the Unicode standard was even available, back when strings were C-style strings and were just that, a series of bytes. Strings that had only values below 128 just happened to be ASCII strings and were printable on the console, whereas strings with values above 128 would produce all kinds of graphical characters and bells.

      +

      Contrast the “byte-string” type with the “unicode/string” type. +Objects of this latter type are created whenever you say something like +u"hello world" (or in Python 3, just "hello world"). In this +case, Python represents each character in the string internally +using multiple bytes per character (something similar to +UTF-16). What’s important is that when using the +unicode/string type to store strings, Python knows the +data’s encoding; it’s in its own internal format. Whereas when +using the string/bytes type, it does not.

      +

      When Python 2 attempts to treat a byte-string as a string, which +means it’s attempting to compare/parse its characters, to coerce +it into another encoding, or to decode it to a unicode object, +it has to guess what the encoding is. In this case, it will +pretty much always guess the encoding as ascii... and if the +byte-string contains bytes above value 128, you’ll get an error. +Python 3 eliminates much of this confusion by just raising an +error unconditionally if a byte-string is used in a +character-aware context.

      +

There is one operation that Python can do with a non-ASCII byte-string, and it's a great source of confusion: it can dump the byte-string straight out to a stream or a file, with nary a care what the encoding is. To Python, this is pretty much like dumping any other kind of binary data (like an image) to a stream somewhere. In Python 2, it is common to see programs that embed all kinds of international characters and encodings into plain byte-strings (i.e. using "hello world" style literals) fly right through their run, sending reams of strings out to wherever they are going, and the programmer, seeing the same output as was expressed in the input, is now under the illusion that his or her program is Unicode-compliant. In fact, their program has no unicode awareness whatsoever, and similarly has no ability to interact with libraries that are unicode aware. Python 3 makes this much less likely by defaulting to unicode as the storage format for strings.

      +

The “pass through encoded data” scheme is what template languages like Cheetah and earlier versions of Myghty do by default. Mako as of version 0.2 also supports this mode of operation when using Python 2, using the disable_unicode=True flag. However, when using Mako in its default unicode-aware mode, it requires explicitness when dealing with non-ASCII encodings. Additionally, if you ever need to handle unicode strings and other kinds of encoding conversions more intelligently, the usage of raw byte-strings quickly becomes a nightmare, since you are sending the Python interpreter collections of bytes for which it can make no intelligent decisions with regard to encoding. In Python 3, Mako only allows usage of native, unicode strings.

      +

In normal Mako operation, all parsed template constructs and output streams are handled internally as Python unicode objects. It's only at the point of render() that this unicode stream may be rendered into whatever the desired output encoding is. The implication here is that the template developer must ensure that the encoding of all non-ASCII templates is explicit (still required in Python 3), that all non-ASCII-encoded expressions are in one way or another converted to unicode (not much of a burden in Python 3), and that the output stream of the template is handled as a unicode stream being encoded to some encoding (still required in Python 3).

      +
      +

Specifying the Encoding of a Template File

      +

      This is the most basic encoding-related setting, and it is +equivalent to Python’s “magic encoding comment”, as described in +pep-0263. Any +template that contains non-ASCII characters requires that this +comment be present so that Mako can decode to unicode (and also +make usage of Python’s AST parsing services). Mako’s lexer will +use this encoding in order to convert the template source into a +unicode object before continuing its parsing:

      +
      ## -*- coding: utf-8 -*-
      +
      +Alors vous imaginez ma surprise, au lever du jour, quand
+une drôle de petite voix m'a réveillé. Elle disait:
+ « S'il vous plaît… dessine-moi un mouton! »
      +
      +
      +

      For the picky, the regular expression used is derived from that +of the above mentioned pep:

      +
      #.*coding[:=]\s*([-\w.]+).*\n
      +
      +
      +

      The lexer will convert to unicode in all cases, so that if any +characters exist in the template that are outside of the +specified encoding (or the default of ascii), the error will +be immediate.

      +

      As an alternative, the template encoding can be specified +programmatically to either Template or TemplateLookup via +the input_encoding parameter:

      +
      t = TemplateLookup(directories=['./'], input_encoding='utf-8')
      +
      +
      +

      The above will assume all located templates specify utf-8 +encoding, unless the template itself contains its own magic +encoding comment, which takes precedence.

      +
      +
      +

Handling Expressions

      +

      The next area that encoding comes into play is in expression +constructs. By default, Mako’s treatment of an expression like +this:

      +
      ${"hello world"}
      +
      +
      +

      looks something like this:

      +
      context.write(unicode("hello world"))
      +
      +
      +

      In Python 3, it’s just:

      +
      context.write(str("hello world"))
      +
      +
      +

That is, the output of all expressions is run through the unicode built-in. This is the default setting, and can be modified to expect various encodings. The unicode step serves both the purpose of rendering non-string expressions into strings (such as integers or objects which contain __str__() methods), and to ensure that the final output stream is constructed as a unicode object. The main implication of this is that any raw byte-strings that contain an encoding other than ASCII must first be decoded to a Python unicode object. It means you can't say this in Python 2:

      +
      ${"voix m???a r??veill??."}  ## error in Python 2!
      +
      +
      +

      You must instead say this:

      +
      ${u"voix m???a r??veill??."}  ## OK !
      +
      +
      +

      Similarly, if you are reading data from a file that is streaming +bytes, or returning data from some object that is returning a +Python byte-string containing a non-ASCII encoding, you have to +explicitly decode to unicode first, such as:

      +
      ${call_my_object().decode('utf-8')}
      +
      +
      +

Note that filehandles acquired by open() in Python 3 default to returning "text", that is, the decoding is done for you. See Python 3's documentation for the open() built-in for details on this.
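For example, a small sketch (the file name greeting.txt is hypothetical) showing the difference between text mode, which decodes for you, and binary mode, which requires an explicit decode before handing the data to a template:
# Python 3: text mode decodes to str using the given encoding
with open('greeting.txt', encoding='utf-8') as f:
    message = f.read()

# binary mode returns bytes; decode explicitly before rendering
with open('greeting.txt', 'rb') as f:
    message = f.read().decode('utf-8')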

      +

      If you want a certain encoding applied to all expressions, +override the unicode builtin with the decode built-in at the +Template or TemplateLookup level:

      +
      t = Template(templatetext, default_filters=['decode.utf8'])
      +
      +
      +

      Note that the built-in decode object is slower than the +unicode function, since unlike unicode it’s not a Python +built-in, and it also checks the type of the incoming data to +determine if string conversion is needed first.

      +

      The default_filters argument can be used to entirely customize +the filtering process of expressions. This argument is described +in The default_filters Argument.

      +
      +
      +

Defining Output Encoding

      +

      Now that we have a template which produces a pure unicode output +stream, all the hard work is done. We can take the output and do +anything with it.

      +

      As stated in the “Usage” chapter, both Template and +TemplateLookup accept output_encoding and encoding_errors +parameters which can be used to encode the output in any Python +supported codec:

      +
      from mako.template import Template
      +from mako.lookup import TemplateLookup
      +
      +mylookup = TemplateLookup(directories=['/docs'], output_encoding='utf-8', encoding_errors='replace')
      +
      +mytemplate = mylookup.get_template("foo.txt")
      +print mytemplate.render()
      +
      +
      +

      render() will return a bytes object in Python 3 if an output +encoding is specified. By default it performs no encoding and +returns a native string.
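In other words, a quick sketch of the two cases under Python 3:
from mako.template import Template

t = Template("hello world", output_encoding='utf-8')
assert isinstance(t.render(), bytes)    # encoded output

t = Template("hello world")
assert isinstance(t.render(), str)      # no output_encoding: native string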

      +

      render_unicode() will return the template output as a Python +unicode object (or string in Python 3):

      +
      print mytemplate.render_unicode()
      +
      +
      +

The above method disregards the output encoding keyword argument; you can encode yourself by saying:

      +
      print mytemplate.render_unicode().encode('utf-8', 'replace')
      +
      +
      +
      +

Buffer Selection

      +

      Mako does play some games with the style of buffering used +internally, to maximize performance. Since the buffer is by far +the most heavily used object in a render operation, it’s +important!

      +

      When calling render() on a template that does not specify any +output encoding (i.e. it’s ascii), Python’s cStringIO module, +which cannot handle encoding of non-ASCII unicode objects +(even though it can send raw byte-strings through), is used for +buffering. Otherwise, a custom Mako class called +FastEncodingBuffer is used, which essentially is a super +dumbed-down version of StringIO that gathers all strings into +a list and uses u''.join(elements) to produce the final output +– it’s markedly faster than StringIO.

      +
      +
      +
      +

Saying to Heck with It: Disabling the Usage of Unicode Entirely

      +

      Some segments of Mako’s userbase choose to make no usage of +Unicode whatsoever, and instead would prefer the “pass through” +approach; all string expressions in their templates return +encoded byte-strings, and they would like these strings to pass +right through. The only advantage to this approach is that +templates need not use u"" for literal strings; there’s an +arguable speed improvement as well since raw byte-strings +generally perform slightly faster than unicode objects in +Python. For these users, assuming they’re sticking with Python +2, they can hit the disable_unicode=True flag as so:

      +
      # -*- encoding:utf-8 -*-
      +from mako.template import Template
      +
      +t = Template("dr??le de petite voix m???a r??veill??.", disable_unicode=True, input_encoding='utf-8')
      +print t.code
      +
      +
      +

      The disable_unicode mode is strictly a Python 2 thing. It is +not supported at all in Python 3.

      +

      The generated module source code will contain elements like +these:

      +
      # -*- encoding:utf-8 -*-
      +#  ...more generated code ...
      +
      +def render_body(context,**pageargs):
      +    context.caller_stack.push_frame()
      +    try:
      +        __M_locals = dict(pageargs=pageargs)
      +        # SOURCE LINE 1
      +        context.write('dr\xc3\xb4le de petite voix m\xe2\x80\x99a r\xc3\xa9veill\xc3\xa9.')
      +        return ''
      +    finally:
      +        context.caller_stack.pop_frame()
      +
      +
      +

In the above, the string literal used within Context.write() is a regular byte-string.

      +

      When disable_unicode=True is turned on, the default_filters +argument which normally defaults to ["unicode"] now defaults +to ["str"] instead. Setting default_filters to the empty list +[] can remove the overhead of the str call. Also, in this +mode you cannot safely call render_unicode() – you’ll get +unicode/decode errors.

      +

The h filter (HTML escape) uses a less performant pure Python escape function in non-unicode mode. This is because MarkupSafe only supports Python unicode objects for non-ASCII strings.

      +

      +Changed in version 0.3.4: In prior versions, it used cgi.escape(), which has been replaced +with a function that also escapes single quotes.

      +
      +

Rules for using disable_unicode=True

      +
        +
      • Don’t use this mode unless you really, really want to and you +absolutely understand what you’re doing.
      • +
      • Don’t use this option just because you don’t want to learn to +use Unicode properly; we aren’t supporting user issues in this +mode of operation. We will however offer generous help for the +vast majority of users who stick to the Unicode program.
      • +
      • Python 3 is unicode by default, and the flag is not available +when running on Python 3.
      • +
      +
      +
      +
      + +
      + +
      + + + +
      + +
      + +
      + + + +
      +
      + + diff --git a/lib3/Mako-0.7.3/doc/usage.html b/lib3/Mako-0.7.3/doc/usage.html new file mode 100644 --- /dev/null +++ b/lib3/Mako-0.7.3/doc/usage.html @@ -0,0 +1,1057 @@ + + + + + + + Usage + — + Mako 0.7.3 Documentation + + + + + + + + + + + + + + + + + + + + +
      +
      + + +
      + Hyperfast and lightweight templating for the Python platform. +
      + + +
      + + + +
      + + + + + + + + + + + +
      + + + +
      +

      Mako 0.7.3 Documentation

      + + + +
      + Release: 0.7.3 + +
      + +
      + +
      + + +
+ Mako 0.7.3 Documentation » Usage

      + + Usage + +

      +
      + +
      + +
      + + + +
      + +
      +

Usage

      +
      +

Basic Usage

      +

      This section describes the Python API for Mako templates. If you +are using Mako within a web framework such as Pylons, the work +of integrating Mako’s API is already done for you, in which case +you can skip to the next section, Syntax.

      +

      The most basic way to create a template and render it is through +the Template class:

      +
      from mako.template import Template
      +
      +mytemplate = Template("hello world!")
      +print mytemplate.render()
      +
      +
      +

      Above, the text argument to Template is compiled into a +Python module representation. This module contains a function +called render_body(), which produces the output of the +template. When mytemplate.render() is called, Mako sets up a +runtime environment for the template and calls the +render_body() function, capturing the output into a buffer and +returning its string contents.
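To peek at that module representation, the generated Python source is available via the template's code attribute (described further in the API reference below):
from mako.template import Template

mytemplate = Template("hello world!")
print(mytemplate.code)     # the generated module source, including render_body()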

      +

      The code inside the render_body() function has access to a +namespace of variables. You can specify these variables by +sending them as additional keyword arguments to the render() +method:

      +
      from mako.template import Template
      +
      +mytemplate = Template("hello, ${name}!")
      +print mytemplate.render(name="jack")
      +
      +
      +

      The render() method calls upon Mako to create a +Context object, which stores all the variable names accessible +to the template and also stores a buffer used to capture output. +You can create this Context yourself and have the template +render with it, using the render_context() method:

      +
      from mako.template import Template
      +from mako.runtime import Context
      +from StringIO import StringIO
      +
      +mytemplate = Template("hello, ${name}!")
      +buf = StringIO()
      +ctx = Context(buf, name="jack")
      +mytemplate.render_context(ctx)
      +print buf.getvalue()
      +
      +
      +
      +
      +

Using File-Based Templates

      +

      A Template can also load its template source code from a file, +using the filename keyword argument:

      +
      from mako.template import Template
      +
      +mytemplate = Template(filename='/docs/mytmpl.txt')
      +print mytemplate.render()
      +
      +
      +

      For improved performance, a Template which is loaded from a +file can also cache the source code to its generated module on +the filesystem as a regular Python module file (i.e. a .py +file). To do this, just add the module_directory argument to +the template:

      +
      from mako.template import Template
      +
      +mytemplate = Template(filename='/docs/mytmpl.txt', module_directory='/tmp/mako_modules')
      +print mytemplate.render()
      +
      +
      +

      When the above code is rendered, a file +/tmp/mako_modules/docs/mytmpl.txt.py is created containing the +source code for the module. The next time a Template with the +same arguments is created, this module file will be +automatically re-used.

      +
      +
      +

Using TemplateLookup

      +

      All of the examples thus far have dealt with the usage of a +single Template object. If the code within those templates +tries to locate another template resource, it will need some way +to find them, using simple URI strings. For this need, the +resolution of other templates from within a template is +accomplished by the TemplateLookup class. This class is +constructed given a list of directories in which to search for +templates, as well as keyword arguments that will be passed to +the Template objects it creates:

      +
      from mako.template import Template
      +from mako.lookup import TemplateLookup
      +
      +mylookup = TemplateLookup(directories=['/docs'])
      +mytemplate = Template("""<%include file="header.txt"/> hello world!""", lookup=mylookup)
      +
      +
      +

      Above, we created a textual template which includes the file +"header.txt". In order for it to have somewhere to look for +"header.txt", we passed a TemplateLookup object to it, which +will search in the directory /docs for the file "header.txt".

      +

      Usually, an application will store most or all of its templates +as text files on the filesystem. So far, all of our examples +have been a little bit contrived in order to illustrate the +basic concepts. But a real application would get most or all of +its templates directly from the TemplateLookup, using the +aptly named get_template() method, which accepts the URI of the +desired template:

      +
      from mako.template import Template
      +from mako.lookup import TemplateLookup
      +
      +mylookup = TemplateLookup(directories=['/docs'], module_directory='/tmp/mako_modules')
      +
      +def serve_template(templatename, **kwargs):
      +    mytemplate = mylookup.get_template(templatename)
      +    print mytemplate.render(**kwargs)
      +
      +
      +

In the example above, we create a TemplateLookup which will look for templates in the /docs directory, and will store generated module files in the /tmp/mako_modules directory. The lookup locates templates by appending the given URI to each of its search directories; so if you gave it a URI of /etc/beans/info.txt, it would search for the file /docs/etc/beans/info.txt, and if that file is not found, raise a TopLevelNotFound exception, which is a custom Mako exception.

      +

      When the lookup locates templates, it will also assign a uri +property to the Template which is the URI passed to the +get_template() call. Template uses this URI to calculate the +name of its module file. So in the above example, a +templatename argument of /etc/beans/info.txt will create a +module file /tmp/mako_modules/etc/beans/info.txt.py.

      +
      +

Setting the Collection Size

      +

      The TemplateLookup also serves the important need of caching a +fixed set of templates in memory at a given time, so that +successive URI lookups do not result in full template +compilations and/or module reloads on each request. By default, +the TemplateLookup size is unbounded. You can specify a fixed +size using the collection_size argument:

      +
      mylookup = TemplateLookup(directories=['/docs'],
      +                module_directory='/tmp/mako_modules', collection_size=500)
      +
      +
      +

      The above lookup will continue to load templates into memory +until it reaches a count of around 500. At that point, it will +clean out a certain percentage of templates using a least +recently used scheme.

      +
      +
      +

Setting Filesystem Checks

      +

Another important flag on TemplateLookup is filesystem_checks. This defaults to True, and says that each time a template is returned by the get_template() method, the revision time of the original template file is checked against the last time the template was loaded; if the file is newer, its contents are reloaded and the template is recompiled. On a production system, setting filesystem_checks to False can afford a small to moderate performance increase (depending on the type of filesystem used).
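For example, a production deployment might configure the lookup as:
mylookup = TemplateLookup(directories=['/docs'],
                module_directory='/tmp/mako_modules',
                filesystem_checks=False)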

      +
      +
      +
      +

Using Unicode and Encoding

      +

      Both Template and TemplateLookup accept output_encoding +and encoding_errors parameters which can be used to encode the +output in any Python supported codec:

      +
      from mako.template import Template
      +from mako.lookup import TemplateLookup
      +
      +mylookup = TemplateLookup(directories=['/docs'], output_encoding='utf-8', encoding_errors='replace')
      +
      +mytemplate = mylookup.get_template("foo.txt")
      +print mytemplate.render()
      +
      +
      +

      When using Python 3, the render() method will return a bytes +object, if output_encoding is set. Otherwise it returns a +string.

      +

      Additionally, the render_unicode() method exists which will +return the template output as a Python unicode object, or in +Python 3 a string:

      +
      print mytemplate.render_unicode()
      +
      +
      +

      The above method disregards the output encoding keyword +argument; you can encode yourself by saying:

      +
      print mytemplate.render_unicode().encode('utf-8', 'replace')
      +
      +
      +

      Note that Mako’s ability to return data in any encoding and/or +unicode implies that the underlying output stream of the +template is a Python unicode object. This behavior is described +fully in The Unicode Chapter.

      +
      +
      +

Handling Exceptions

      +

      Template exceptions can occur in two distinct places. One is +when you lookup, parse and compile the template, the other +is when you run the template. Within the running of a +template, exceptions are thrown normally from whatever Python +code originated the issue. Mako has its own set of exception +classes which mostly apply to the lookup and lexer/compiler +stages of template construction. Mako provides some library +routines that can be used to help provide Mako-specific +information about any exception’s stack trace, as well as +formatting the exception within textual or HTML format. In all +cases, the main value of these handlers is that of converting +Python filenames, line numbers, and code samples into Mako +template filenames, line numbers, and code samples. All lines +within a stack trace which correspond to a Mako template module +will be converted to be against the originating template file.

      +

      To format exception traces, the text_error_template() and +html_error_template() functions are provided. They make usage of +sys.exc_info() to get at the most recently thrown exception. +Usage of these handlers usually looks like:

      +
      from mako import exceptions
      +
      +try:
      +    template = lookup.get_template(uri)
      +    print template.render()
      +except:
      +    print exceptions.text_error_template().render()
      +
      +
      +

      Or for the HTML render function:

      +
      from mako import exceptions
      +
      +try:
      +    template = lookup.get_template(uri)
      +    print template.render()
      +except:
      +    print exceptions.html_error_template().render()
      +
      +
      +

      The html_error_template() template accepts two options: +specifying full=False causes only a section of an HTML +document to be rendered. Specifying css=False will disable the +default stylesheet from being rendered.

      +

      E.g.:

      +
      print exceptions.html_error_template().render(full=False)
      +
      +
      +

      The HTML render function is also available built-in to +Template using the format_exceptions flag. In this case, any +exceptions raised within the render stage of the template +will result in the output being substituted with the output of +html_error_template():

      +
      template = Template(filename="/foo/bar", format_exceptions=True)
      +print template.render()
      +
      +
      +

      Note that the compile stage of the above template occurs when +you construct the Template itself, and no output stream is +defined. Therefore exceptions which occur within the +lookup/parse/compile stage will not be handled and will +propagate normally. While the pre-render traceback usually will +not include any Mako-specific lines anyway, it will mean that +exceptions which occur previous to rendering and those which +occur within rendering will be handled differently... so the +try/except patterns described previously are probably of more +general use.

      +

      The underlying object used by the error template functions is +the RichTraceback object. This object can also be used +directly to provide custom error views. Here’s an example usage +which describes its general API:

      +
      from mako.exceptions import RichTraceback
      +
      +try:
      +    template = lookup.get_template(uri)
      +    print template.render()
      +except:
      +    traceback = RichTraceback()
      +    for (filename, lineno, function, line) in traceback.traceback:
      +        print "File %s, line %s, in %s" % (filename, lineno, function)
      +        print line, "\n"
      +    print "%s: %s" % (str(traceback.error.__class__.__name__), traceback.error)
      +
      +
      +
      +
      +

Common Framework Integrations

      +

      The Mako distribution includes a little bit of helper code for +the purpose of using Mako in some popular web framework +scenarios. This is a brief description of what’s included.

      +
      +

WSGI

      +

A sample WSGI application is included in the distribution in the file examples/wsgi/run_wsgi.py. This runner is set up to pull files from a templates as well as an htdocs directory and includes a rudimentary two-file layout. The WSGI runner acts as a fully functional standalone web server, using wsgiutils to run itself, and propagates GET and POST arguments from the request into the Context, can serve images, CSS files and other kinds of files, and also displays errors using Mako's included exception-handling utilities.

      +
      +
      +

Pygments

      +

      A Pygments-compatible syntax +highlighting module is included under mako.ext.pygmentplugin. +This module is used in the generation of Mako documentation and +also contains various setuptools entry points under the heading +pygments.lexers, including mako, html+mako, xml+mako +(see the setup.py file for all the entry points).

      +
      +
      +

Babel

      +

      Mako provides support for extracting gettext messages from +templates via a Babel extractor +entry point under mako.ext.babelplugin.

      +

      Gettext messages are extracted from all Python code sections, +including those of control lines and expressions embedded +in tags.

      +

      Translator +comments +may also be extracted from Mako templates when a comment tag is +specified to Babel (such as with +the -c option).

      +

      For example, a project "myproj" contains the following Mako +template at myproj/myproj/templates/name.html:

      +
      <div id="name">
      +  Name:
      +  ## TRANSLATORS: This is a proper name. See the gettext
      +  ## manual, section Names.
      +  ${_('Francois Pinard')}
      +</div>
      +
      +
      +

      To extract gettext messages from this template the project needs +a Mako section in its Babel Extraction Method Mapping +file +(typically located at myproj/babel.cfg):

      +
      # Extraction from Python source files
      +
      +[python: myproj/**.py]
      +
      +# Extraction from Mako templates
      +
      +[mako: myproj/templates/**.html]
      +input_encoding = utf-8
      +
      +
      +

      The Mako extractor supports an optional input_encoding +parameter specifying the encoding of the templates (identical to +Template/TemplateLookup‘s input_encoding parameter).

      +

      Invoking Babel‘s extractor at the +command line in the project’s root directory:

      +
      myproj$ pybabel extract -F babel.cfg -c "TRANSLATORS:" .
      +
      +
      +

      will output a gettext catalog to stdout including the following:

      +
      #. TRANSLATORS: This is a proper name. See the gettext
      +#. manual, section Names.
      +#: myproj/templates/name.html:5
      +msgid "Francois Pinard"
      +msgstr ""
      +
      +
      +

This is only a basic example: Babel can be invoked from setup.py and its command line options specified in the accompanying setup.cfg via Babel Distutils/Setuptools Integration.

Comments must immediately precede a gettext message to be extracted. In the following case the TRANSLATORS: comment would not have been extracted:

    <div id="name">
      ## TRANSLATORS: This is a proper name. See the gettext
      ## manual, section Names.
      Name: ${_('Francois Pinard')}
    </div>

See the Babel User Guide for more information.
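
The same extraction can also be driven programmatically. This is a minimal sketch, assuming the standard Babel extraction protocol exposed by mako.ext.babelplugin described above; the template text is the example from earlier:

    import io
    from mako.ext.babelplugin import extract

    template_source = (
        b'<div id="name">\n'
        b'  Name:\n'
        b'  ## TRANSLATORS: This is a proper name. See the gettext\n'
        b'  ## manual, section Names.\n'
        b"  ${_('Francois Pinard')}\n"
        b'</div>\n'
    )

    # Each yielded tuple is (lineno, funcname, messages, comments).
    for lineno, funcname, messages, comments in extract(
            io.BytesIO(template_source), ['_'], ['TRANSLATORS:'],
            {'input_encoding': 'utf-8'}):
        print(lineno, funcname, messages, comments)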

API Reference

class mako.template.Template(text=None, filename=None, uri=None, format_exceptions=False, error_handler=None, lookup=None, output_encoding=None, encoding_errors='strict', module_directory=None, cache_args=None, cache_impl='beaker', cache_enabled=True, cache_type=None, cache_dir=None, cache_url=None, module_filename=None, input_encoding=None, disable_unicode=False, module_writer=None, bytestring_passthrough=False, default_filters=None, buffer_filters=(), strict_undefined=False, imports=None, enable_loop=True, preprocessor=None)

Bases: object

Represents a compiled template.

Template includes a reference to the original template source (via the source attribute), the source code of the generated Python module (the code attribute), and a reference to the actual Python module itself.

Template is constructed using either a literal string representing the template text, or a filename representing a filesystem path to a source file.

Parameters:
• text – textual template source. This argument is mutually exclusive versus the filename parameter.

• filename – filename of the source template. This argument is mutually exclusive versus the text parameter.

• buffer_filters – string list of filters to be applied to the output of %defs which are buffered, cached, or otherwise filtered, after all filters defined with the %def itself have been applied. Allows the creation of default expression filters that let the output of return-valued %defs "opt out" of that filtering via passing special attributes or objects.

• bytestring_passthrough – When True, and output_encoding is set to None, and Template.render() is used to render, the StringIO or cStringIO buffer will be used instead of the default "fast" buffer. This allows raw bytestrings in the output stream, such as in expressions, to pass straight through to the buffer. This flag is forced to True if disable_unicode is also configured. New in version 0.4: added to provide the same behavior as that of the previous series.

• cache_args – Dictionary of cache configuration arguments that will be passed to the CacheImpl. See Caching.

• cache_dir – Deprecated since version 0.6: use the 'dir' argument in the cache_args dictionary. See Caching.

• cache_enabled – Boolean flag which enables caching of this template. See Caching.

• cache_impl – String name of a CacheImpl caching implementation to use. Defaults to 'beaker'.

• cache_type – Deprecated since version 0.6: use the 'type' argument in the cache_args dictionary. See Caching.

• cache_url – Deprecated since version 0.6: use the 'url' argument in the cache_args dictionary. See Caching.

• default_filters – List of string filter names that will be applied to all expressions. See The default_filters Argument.

• disable_unicode – Disables all awareness of Python Unicode objects. See Saying to Heck with It: Disabling the Usage of Unicode Entirely.

• enable_loop – When True, enable the loop context variable. This can be set to False to support templates that may be making usage of the name "loop". Individual templates can re-enable the "loop" context by placing the directive enable_loop="True" inside the <%page> tag – see Migrating Legacy Templates that Use the Word "loop".

• encoding_errors – Error parameter passed to encode() when string encoding is performed. See Using Unicode and Encoding.

• error_handler – Python callable which is called whenever compile or runtime exceptions occur. The callable is passed the current context as well as the exception. If the callable returns True, the exception is considered to be handled, else it is re-raised after the function completes. Is used to provide custom error-rendering functions.

• format_exceptions – If True, exceptions which occur during the render phase of this template will be caught and formatted into an HTML error page, which then becomes the rendered result of the render() call. Otherwise, runtime exceptions are propagated outwards.

• imports – String list of Python statements, typically individual "import" lines, which will be placed into the module level preamble of all generated Python modules. See the example in The default_filters Argument.

• input_encoding – Encoding of the template's source code. Can be used in lieu of the coding comment. See Using Unicode and Encoding as well as The Unicode Chapter for details on source encoding.

• lookup – A TemplateLookup instance that will be used for all file lookups via the <%namespace>, <%include>, and <%inherit> tags. See Using TemplateLookup.

• module_directory – Filesystem location where generated Python module files will be placed.

• module_filename – Overrides the filename of the generated Python module file. For advanced usage only.

• module_writer – A callable which overrides how the Python module is written entirely. The callable is passed the encoded source content of the module and the destination path to be written to. The default behavior of module writing uses a tempfile in conjunction with a file move in order to make the operation atomic. So a user-defined module writing function that mimics the default behavior would be:

      import tempfile
      import os
      import shutil

      def module_writer(source, outputpath):
          (dest, name) = \
              tempfile.mkstemp(
                  dir=os.path.dirname(outputpath)
              )

          os.write(dest, source)
          os.close(dest)
          shutil.move(name, outputpath)

      from mako.template import Template
      mytemplate = Template(
          filename="index.html",
          module_directory="/path/to/modules",
          module_writer=module_writer
      )

  The function is provided for unusual configurations where certain platform-specific permissions or other special steps are needed.

• output_encoding – The encoding to use when render() is called. See Using Unicode and Encoding as well as The Unicode Chapter.

• preprocessor – Python callable which will be passed the full template source before it is parsed. The return result of the callable will be used as the template source code.

• strict_undefined – Replaces the automatic usage of UNDEFINED for any undeclared variables not located in the Context with an immediate raise of NameError. The advantage is immediate reporting of missing variables which include the name. New in version 0.3.6.

• uri – string URI or other identifier for this template. If not provided, the uri is generated from the filesystem path, or from the in-memory identity of a non-file-based template. The primary usage of the uri is to provide a key within TemplateLookup, as well as to generate the file path of the generated Python module file, if module_directory is specified.
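
A minimal construction sketch using a few of the parameters above (the template text and module directory are illustrative placeholders):

    from mako.template import Template

    mytemplate = Template(
        text="Hello, ${name}!",
        strict_undefined=True,                # undeclared names raise NameError
        module_directory='/tmp/mako_modules'  # cache the generated module here
    )
    print(mytemplate.render(name='world'))    # -> Hello, world!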

code

Return the module source code for this Template.

get_def(name)

Return a def of this template as a DefTemplate.

render(*args, **data)

Render the output of this template as a string.

If the template specifies an output encoding, the string will be encoded accordingly, else the output is raw (raw output uses cStringIO and can't handle multibyte characters). A Context object is created corresponding to the given data. Arguments that are explicitly declared by this template's internal rendering method are also pulled from the given *args, **data members.

render_context(context, *args, **kwargs)

Render this Template with the given context.

The data is written to the context's buffer.

render_unicode(*args, **data)

Render the output of this template as a unicode object.

source

Return the template source code for this Template.
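
A short sketch of render_context(), which writes into a caller-supplied buffer rather than returning a string (the buffer and data names are illustrative):

    from io import StringIO
    from mako.template import Template
    from mako.runtime import Context

    template = Template("Hello, ${name}!")
    buf = StringIO()
    # Context wraps the output buffer plus the data made available to the template.
    template.render_context(Context(buf, name='world'))
    print(buf.getvalue())   # -> Hello, world!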

class mako.template.DefTemplate(parent, callable_)

Bases: mako.template.Template

A Template which represents a callable def in a parent template.

class mako.lookup.TemplateCollection

Bases: object

Represent a collection of Template objects, identifiable via URI.

A TemplateCollection is linked to the usage of all template tags that address other templates, such as <%include>, <%namespace>, and <%inherit>. The file attribute of each of those tags refers to a string URI that is passed to that Template object's TemplateCollection for resolution.

TemplateCollection is an abstract class, with the usual default implementation being TemplateLookup.

adjust_uri(uri, filename)

Adjust the given uri based on the calling filename.

When this method is called from the runtime, the filename parameter is taken directly to the filename attribute of the calling template. Therefore a custom TemplateCollection subclass can place any string identifier desired in the filename parameter of the Template objects it constructs and have them come back here.

filename_to_uri(uri, filename)

Convert the given filename to a URI relative to this TemplateCollection.

get_template(uri, relativeto=None)

Return a Template object corresponding to the given uri.

The default implementation raises NotImplementedError. Implementations should raise TemplateLookupException if the given uri cannot be resolved.

Parameters:
• uri – String URI of the template to be resolved.
• relativeto – If present, the given uri is assumed to be relative to this URI.

has_template(uri)

Return True if this TemplateCollection is capable of returning a Template object for the given uri.

Parameters:
• uri – String URI of the template to be resolved.
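
Because TemplateCollection is abstract, a custom subclass mainly needs to resolve URIs itself. A minimal sketch serving templates from an in-memory dict (the dict contents are illustrative assumptions):

    from mako.template import Template
    from mako.lookup import TemplateCollection
    from mako import exceptions

    class DictCollection(TemplateCollection):
        def __init__(self, sources):
            self.sources = sources

        def get_template(self, uri, relativeto=None):
            try:
                # Hand each Template a reference back to this collection.
                return Template(self.sources[uri], uri=uri, lookup=self)
            except KeyError:
                raise exceptions.TemplateLookupException(
                    "Cannot locate template for uri %r" % uri)

        def has_template(self, uri):
            return uri in self.sources

    collection = DictCollection({"/hello.txt": "Hello, ${name}!"})
    print(collection.get_template("/hello.txt").render(name="world"))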
      +
      +class mako.lookup.TemplateLookup(directories=None, module_directory=None, filesystem_checks=True, collection_size=-1, format_exceptions=False, error_handler=None, disable_unicode=False, bytestring_passthrough=False, output_encoding=None, encoding_errors='strict', cache_args=None, cache_impl='beaker', cache_enabled=True, cache_type=None, cache_dir=None, cache_url=None, modulename_callable=None, module_writer=None, default_filters=None, buffer_filters=(), strict_undefined=False, imports=None, enable_loop=True, input_encoding=None, preprocessor=None)??
      +

      Bases: mako.lookup.TemplateCollection

      +

      Represent a collection of templates that locates template source files +from the local filesystem.

      +

      The primary argument is the directories argument, the list of +directories to search:

      +
      lookup = TemplateLookup(["/path/to/templates"])
      +some_template = lookup.get_template("/index.html")
      +
      +
      +

      The TemplateLookup can also be given Template objects +programatically using put_string() or put_template():

      +
      lookup = TemplateLookup()
      +lookup.put_string("base.html", '''
      +    <html><body>${self.next()}</body></html>
      +''')
      +lookup.put_string("hello.html", '''
      +    <%include file='base.html'/>
      +
      +    Hello, world !
      +''')
      +
      +
      + +++ + + + +
      Parameters:
        +
      • directories – A list of directory names which will be +searched for a particular template URI. The URI is appended +to each directory and the filesystem checked.
      • +
      • collection_size – Approximate size of the collection used +to store templates. If left at its default of -1, the size +is unbounded, and a plain Python dictionary is used to +relate URI strings to Template instances. +Otherwise, a least-recently-used cache object is used which +will maintain the size of the collection approximately to +the number given.
      • +
      • filesystem_checks – When at its default value of True, +each call to TemplateLookup.get_template() will +compare the filesystem last modified time to the time in +which an existing Template object was created. +This allows the TemplateLookup to regenerate a +new Template whenever the original source has +been updated. Set this to False for a very minor +performance increase.
      • +
      • modulename_callable – A callable which, when present, +is passed the path of the source file as well as the +requested URI, and then returns the full path of the +generated Python module file. This is used to inject +alternate schemes for Python module location. If left at +its default of None, the built in system of generation +based on module_directory plus uri is used.
      • +
      +
      +

      All other keyword parameters available for +Template are mirrored here. When new +Template objects are created, the keywords +established with this TemplateLookup are passed on +to each new Template.

      +
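
A hedged configuration sketch combining several of the parameters above (all paths are placeholders):

    from mako.lookup import TemplateLookup

    lookup = TemplateLookup(
        directories=["/path/to/templates"],
        module_directory="/tmp/mako_modules",  # persist generated modules on disk
        filesystem_checks=False,               # skip mtime checks for a minor speedup
        collection_size=500,                   # keep an LRU-bounded in-memory collection
        input_encoding="utf-8",
        output_encoding="utf-8",
    )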

adjust_uri(uri, relativeto)

Adjust the given uri based on the given relative URI.

filename_to_uri(filename)

Convert the given filename to a URI relative to this TemplateCollection.

get_template(uri)

Return a Template object corresponding to the given uri.

Note: the relativeto argument is not supported here at the moment.

put_string(uri, text)

Place a new Template object into this TemplateLookup, based on the given string of text.

put_template(uri, template)

Place a new Template object into this TemplateLookup, based on the given Template object.
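
For completeness, a small sketch of put_template(), registering a pre-built Template under a chosen URI (the URI and template text are arbitrary examples):

    from mako.template import Template
    from mako.lookup import TemplateLookup

    lookup = TemplateLookup()
    lookup.put_template("/footer.html",
                        Template("(c) ${year}", uri="/footer.html"))
    print(lookup.get_template("/footer.html").render(year=2013))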

class mako.exceptions.RichTraceback(error=None, traceback=None)

Bases: object

Pulls the current exception from the sys traceback and extracts Mako-specific template information.

See the usage examples in Handling Exceptions.

error

The exception instance.

message

The exception error message as unicode.

source

Source code of the file where the error occurred. If the error occurred within a compiled template, this is the template source.

lineno

Line number where the error occurred. If the error occurred within a compiled template, the line number is adjusted to that of the template source.

records

A list of 8-tuples containing the original python traceback elements, plus the filename, line number, source line, and full template source for the traceline mapped back to its originating source template, if any for that traceline (else the fields are None).

reverse_records

The list of records in reverse.

traceback

A list of 4-tuples, in the same format as a regular python traceback, with template-corresponding traceback records replacing the originals.

reverse_traceback

The traceback list in reverse.
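
A minimal sketch of RichTraceback inside an except block, touching the attributes documented above (the failing template is just an example):

    from mako.template import Template
    from mako.exceptions import RichTraceback

    try:
        Template("${name + 1}").render()   # no 'name' supplied, so rendering raises
    except Exception:
        tb = RichTraceback()
        print("%s: %s" % (tb.error.__class__.__name__, tb.message))
        for filename, lineno, function, line in tb.traceback:
            print("  %s:%s in %s\n    %s" % (filename, lineno, function, line))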

mako.exceptions.html_error_template()

Provides a template that renders a stack trace in an HTML format, providing an excerpt of code as well as substituting source template filenames, line numbers and code for that of the originating source template, as applicable.

The template's default encoding_errors value is 'htmlentityreplace'. The template has two options. With the full option disabled, only a section of an HTML document is returned. With the css option disabled, the default stylesheet won't be included.

mako.exceptions.text_error_template(lookup=None)

Provides a template that renders a stack trace in a similar format to the Python interpreter, substituting source template filenames, line numbers and code for that of the originating source template, as applicable.
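
As a brief illustration of the two helpers above, this sketch renders both error templates for a template that fails at render time (the failing expression is just an example):

    from mako.template import Template
    from mako import exceptions

    broken = Template("${1 / 0}")

    try:
        broken.render()
    except Exception:
        # Plain-text traceback with template lines substituted in:
        print(exceptions.text_error_template().render())
        # HTML error page, e.g. to return from a web handler:
        html_page = exceptions.html_error_template().render()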

diff --git a/lib3/Mako-0.7.3/examples/bench/basic.py b/lib3/Mako-0.7.3/examples/bench/basic.py
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/examples/bench/basic.py
@@ -0,0 +1,191 @@
+# basic.py - basic benchmarks adapted from Genshi
+# Copyright (C) 2006 Edgewall Software
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# 1. Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in
+#    the documentation and/or other materials provided with the
+#    distribution.
+# 3. The name of the author may not be used to endorse or promote
+#    products derived from this software without specific prior
+#    written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
+# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
+# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from cgi import escape
+import os
+try:
+    from io import StringIO
+except ImportError:
+    from io import StringIO
+import sys
+import timeit
+
+def u(stringlit):
+    if sys.version_info >= (3,):
+        return stringlit
+    else:
+        return stringlit.decode('latin1')
+
+__all__ = ['mako', 'mako_inheritance', 'jinja2', 'jinja2_inheritance',
+           'cheetah', 'django', 'myghty', 'genshi', 'kid']
+
+# Templates content and constants
+TITLE = 'Just a test'
+USER = 'joe'
+ITEMS = ['Number %d' % num for num in range(1, 15)]
+U_ITEMS = [u(item) for item in ITEMS]
+
+def genshi(dirname, verbose=False):
+    from genshi.template import TemplateLoader
+    loader = TemplateLoader([dirname], auto_reload=False)
+    template = loader.load('template.html')
+    def render():
+        data = dict(title=TITLE, user=USER, items=ITEMS)
+        return template.generate(**data).render('xhtml')
+
+    if verbose:
+        print((render()))
+    return render
+
+def myghty(dirname, verbose=False):
+    from myghty import interp
+    interpreter = interp.Interpreter(component_root=dirname)
+    def render():
+        data = dict(title=TITLE, user=USER, items=ITEMS)
+        buffer = StringIO()
+        interpreter.execute("template.myt", request_args=data, out_buffer=buffer)
+        return buffer.getvalue()
+    if verbose:
+        print((render()))
+    return render
+
+def mako(dirname, verbose=False):
+    from mako.template import Template
+    from mako.lookup import TemplateLookup
+    disable_unicode = (sys.version_info < (3,))
+    lookup = TemplateLookup(directories=[dirname], filesystem_checks=False, disable_unicode=disable_unicode)
+    template = lookup.get_template('template.html')
+    def render():
+        return template.render(title=TITLE, user=USER, list_items=U_ITEMS)
+    if verbose:
+        print((template.code + " " + render()))
+    return render
+mako_inheritance = mako
+
+def jinja2(dirname, verbose=False):
+    from jinja2 import Environment, FileSystemLoader
+    env = Environment(loader=FileSystemLoader(dirname))
+    template = env.get_template('template.html')
+    def render():
+        return template.render(title=TITLE, user=USER, list_items=U_ITEMS)
+    if verbose:
+        print((render()))
+    return render
+jinja2_inheritance = jinja2
+
+def cheetah(dirname, verbose=False):
+    from Cheetah.Template import Template
+    filename = os.path.join(dirname, 'template.tmpl')
+    template = Template(file=filename)
+    def render():
+        template.__dict__.update({'title': TITLE, 'user': USER,
+                                  'list_items': U_ITEMS})
+        return template.respond()
+
+    if verbose:
+        print((dir(template)))
+        print((template.generatedModuleCode()))
+        print((render()))
+    return render
+
+def django(dirname, verbose=False):
+    from django.conf import settings
+    settings.configure(TEMPLATE_DIRS=[os.path.join(dirname, 'templates')])
+    from django import template, templatetags
+    from django.template import loader
+    templatetags.__path__.append(os.path.join(dirname, 'templatetags'))
+    tmpl = loader.get_template('template.html')
+
+    def render():
+        data = {'title': TITLE, 'user': USER, 'items': ITEMS}
+        return tmpl.render(template.Context(data))
+
+    if verbose:
+        print((render()))
+    return render
+
+def kid(dirname, verbose=False):
+    import kid
+    kid.path = kid.TemplatePath([dirname])
+    template = kid.Template(file='template.kid')
+    def render():
+        template = kid.Template(file='template.kid',
+                                title=TITLE, user=USER, items=ITEMS)
+        return template.serialize(output='xhtml')
+
+    if verbose:
+        print((render()))
+    return render
+
+
+def run(engines, number=2000, verbose=False):
+    basepath = os.path.abspath(os.path.dirname(__file__))
+    for engine in engines:
+        dirname = os.path.join(basepath, engine)
+        if verbose:
+            print(('%s:' % engine.capitalize()))
+            print('--------------------------------------------------------')
+        else:
+            sys.stdout.write('%s:' % engine.capitalize())
+        t = timeit.Timer(setup='from __main__ import %s; render = %s(r"%s", %s)'
+                               % (engine, engine, dirname, verbose),
+                         stmt='render()')
+
+        time = t.timeit(number=number) / number
+        if verbose:
+            print('--------------------------------------------------------')
+        print(('%.2f ms' % (1000 * time)))
+        if verbose:
+            print('--------------------------------------------------------')
+
+
+if __name__ == '__main__':
+    engines = [arg for arg in sys.argv[1:] if arg[0] != '-']
+    if not engines:
+        engines = __all__
+
+    verbose = '-v' in sys.argv
+
+    if '-p' in sys.argv:
+        try:
+            import hotshot, hotshot.stats
+            prof = hotshot.Profile("template.prof")
+            benchtime = prof.runcall(run, engines, number=100, verbose=verbose)
+            stats = hotshot.stats.load("template.prof")
+        except ImportError:
+            import cProfile, pstats
+            stmt = "run(%r, number=%r, verbose=%r)" % (engines, 1000, verbose)
+            cProfile.runctx(stmt, globals(), {}, "template.prof")
+            stats = pstats.Stats("template.prof")
+        stats.strip_dirs()
+        stats.sort_stats('time', 'calls')
+        stats.print_stats()
+    else:
+        run(engines, verbose=verbose)
diff --git a/lib3/Mako-0.7.3/examples/bench/cheetah/footer.tmpl b/lib3/Mako-0.7.3/examples/bench/cheetah/footer.tmpl
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/examples/bench/cheetah/footer.tmpl
@@ -0,0 +1,2 @@
+
diff --git a/lib3/Mako-0.7.3/examples/bench/cheetah/header.tmpl b/lib3/Mako-0.7.3/examples/bench/cheetah/header.tmpl
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/examples/bench/cheetah/header.tmpl
@@ -0,0 +1,5 @@
+
+
+
diff --git a/lib3/Mako-0.7.3/examples/bench/cheetah/template.tmpl b/lib3/Mako-0.7.3/examples/bench/cheetah/template.tmpl
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/examples/bench/cheetah/template.tmpl
@@ -0,0 +1,31 @@
+
+
+
+    ${title}
+
+
+
+  #def greeting(name)
+    hello ${name}!
+  #end def
+
+  #include "cheetah/header.tmpl"
+
+  $greeting($user)
+  $greeting('me')
+  $greeting('world')
+
+  Loop
+  #if $list_items
+    #for $list_item in $list_items
+      $list_item
+    #end for
+  #end if
+
+  #include "cheetah/footer.tmpl"
+
diff --git a/lib3/Mako-0.7.3/examples/bench/django/templatetags/__init__.py b/lib3/Mako-0.7.3/examples/bench/django/templatetags/__init__.py
new file mode 100644
diff --git a/lib3/Mako-0.7.3/examples/bench/django/templatetags/bench.py b/lib3/Mako-0.7.3/examples/bench/django/templatetags/bench.py
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/examples/bench/django/templatetags/bench.py
@@ -0,0 +1,8 @@
+from django.template import Library, Node, resolve_variable
+from django.utils.html import escape
+
+register = Library()
+
+def greeting(name):
+    return 'Hello, %s!' % escape(name)
+greeting = register.simple_tag(greeting)
diff --git a/lib3/Mako-0.7.3/examples/bench/kid/base.kid b/lib3/Mako-0.7.3/examples/bench/kid/base.kid
new file mode 100644
--- /dev/null
+++ b/lib3/Mako-0.7.3/examples/bench/kid/base.kid
@@ -0,0 +1,15 @@
+
+
+  Hello, ${name}!
+
+
+
+  ${item}